From 0d8d38d01ce6ed52e999f0b6cc1157156d67a820 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Wed, 25 Sep 2024 11:56:34 -0700 Subject: [PATCH 01/69] [CHIA-1306] Tweak `TransactionEndpointRequest` to require TX args (#18601) * Tweak `TransactionEndpointRequest` to require TX args * whoops * Add an option around the ban --- .../wallet/nft_wallet/test_nft_wallet.py | 6 ++-- chia/rpc/wallet_request_types.py | 22 +++++++++++- chia/rpc/wallet_rpc_client.py | 34 +++++++++++++++---- 3 files changed, 53 insertions(+), 9 deletions(-) diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index 26c9ba2bc877..a3b726c2d33d 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -1734,7 +1734,8 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non ] fee = uint64(1000) set_did_bulk_resp = await env.rpc_client.set_nft_did_bulk( - NFTSetDIDBulk(did_id=hmr_did_id, nft_coin_list=nft_coin_list, fee=fee, push=True) + NFTSetDIDBulk(did_id=hmr_did_id, nft_coin_list=nft_coin_list, fee=fee, push=True), + wallet_environments.tx_config, ) assert len(set_did_bulk_resp.spend_bundle.coin_spends) == 5 assert set_did_bulk_resp.tx_num == 5 # 1 for each NFT being spent (3), 1 for fee tx, 1 for did tx @@ -2031,7 +2032,8 @@ async def test_nft_bulk_transfer(wallet_environments: WalletTestFramework) -> No fee = uint64(1000) address = encode_puzzle_hash(await wallet_1.get_puzzle_hash(new=False), AddressType.XCH.hrp(env_1.node.config)) bulk_transfer_resp = await env_0.rpc_client.transfer_nft_bulk( - NFTTransferBulk(target_address=address, nft_coin_list=nft_coin_list, fee=fee, push=True) + NFTTransferBulk(target_address=address, nft_coin_list=nft_coin_list, fee=fee, push=True), + wallet_environments.tx_config, ) assert len(bulk_transfer_resp.spend_bundle.coin_spends) == 4 assert bulk_transfer_resp.tx_num == 4 diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 5de7d93213b7..701a0f984d5c 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -4,7 +4,7 @@ import sys from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional, Type, TypeVar +from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar from chia_rs import G1Element, G2Element, PrivateKey from typing_extensions import dataclass_transform @@ -12,6 +12,7 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint16, uint32, uint64 from chia.util.streamable import Streamable, streamable +from chia.wallet.conditions import Condition, ConditionValidTimes from chia.wallet.notification_store import Notification from chia.wallet.signer_protocol import ( SignedTransaction, @@ -24,6 +25,7 @@ from chia.wallet.trading.offer import Offer from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.clvm_streamable import json_deserialize_with_clvm_streamable +from chia.wallet.util.tx_config import TXConfig from chia.wallet.vc_wallet.vc_store import VCRecord from chia.wallet.wallet_spend_bundle import WalletSpendBundle @@ -384,6 +386,24 @@ class TransactionEndpointRequest(Streamable): fee: uint64 = uint64(0) push: Optional[bool] = None + def to_json_dict(self, _avoid_ban: bool = False) -> Dict[str, Any]: + if not _avoid_ban: + raise NotImplementedError( + "to_json_dict is banned on TransactionEndpointRequest, please use .json_serialize_for_transport" + ) + else: + return 
super().to_json_dict() + + def json_serialize_for_transport( + self, tx_config: TXConfig, extra_conditions: Tuple[Condition, ...], timelock_info: ConditionValidTimes + ) -> Dict[str, Any]: + return { + **tx_config.to_json_dict(), + **timelock_info.to_json_dict(), + "extra_conditions": [condition.to_json_dict() for condition in extra_conditions], + **self.to_json_dict(_avoid_ban=True), + } + @streamable @dataclass(frozen=True) diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 65dd169b96aa..28347fc7bbf0 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -1190,11 +1190,31 @@ async def nft_mint_bulk( response = await self.fetch("nft_mint_bulk", request) return json_deserialize_with_clvm_streamable(response, NFTMintBulkResponse) - async def set_nft_did_bulk(self, request: NFTSetDIDBulk) -> NFTSetDIDBulkResponse: - return NFTSetDIDBulkResponse.from_json_dict(await self.fetch("nft_set_did_bulk", request.to_json_dict())) + async def set_nft_did_bulk( + self, + request: NFTSetDIDBulk, + tx_config: TXConfig, + extra_conditions: Tuple[Condition, ...] = tuple(), + timelock_info: ConditionValidTimes = ConditionValidTimes(), + ) -> NFTSetDIDBulkResponse: + return NFTSetDIDBulkResponse.from_json_dict( + await self.fetch( + "nft_set_did_bulk", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) + ) - async def transfer_nft_bulk(self, request: NFTTransferBulk) -> NFTTransferBulkResponse: - return NFTTransferBulkResponse.from_json_dict(await self.fetch("nft_transfer_bulk", request.to_json_dict())) + async def transfer_nft_bulk( + self, + request: NFTTransferBulk, + tx_config: TXConfig, + extra_conditions: Tuple[Condition, ...] = tuple(), + timelock_info: ConditionValidTimes = ConditionValidTimes(), + ) -> NFTTransferBulkResponse: + return NFTTransferBulkResponse.from_json_dict( + await self.fetch( + "nft_transfer_bulk", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) + ) # DataLayer async def create_new_dl( @@ -1801,11 +1821,12 @@ async def split_coins( self, args: SplitCoins, tx_config: TXConfig, + extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> SplitCoinsResponse: return SplitCoinsResponse.from_json_dict( await self.fetch( - "split_coins", {**args.to_json_dict(), **tx_config.to_json_dict(), **timelock_info.to_json_dict()} + "split_coins", args.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) ) ) @@ -1813,10 +1834,11 @@ async def combine_coins( self, args: CombineCoins, tx_config: TXConfig, + extra_conditions: Tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> CombineCoinsResponse: return CombineCoinsResponse.from_json_dict( await self.fetch( - "combine_coins", {**args.to_json_dict(), **tx_config.to_json_dict(), **timelock_info.to_json_dict()} + "combine_coins", args.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) ) ) From e3aac5ba3e186d1a9562788104fc52b183c033e5 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 25 Sep 2024 14:56:54 -0400 Subject: [PATCH 02/69] Revert "CHIA-645 Optimize build ancestors table. (#18100)" (#18609) * Revert "CHIA-645 Optimize build ancestors table. (#18100)" This reverts commit 9b7bb246cb94866def8377e1f246c015d7e145ad. 
* restore tests --- chia/data_layer/data_store.py | 49 ++++++++++++++--------------------- 1 file changed, 20 insertions(+), 29 deletions(-) diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index c9919485ac91..9fa25e1ad539 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -1737,41 +1737,32 @@ async def _get_one_ancestor_multiple_hashes( return [InternalNode.from_row(row=row) for row in rows] async def build_ancestor_table_for_latest_root(self, store_id: bytes32) -> None: - async with self.db_wrapper.writer() as writer: + async with self.db_wrapper.writer(): root = await self.get_tree_root(store_id=store_id) if root.node_hash is None: return + previous_root = await self.get_tree_root( + store_id=store_id, + generation=max(root.generation - 1, 0), + ) - await writer.execute( - """ - WITH RECURSIVE tree_from_root_hash AS ( - SELECT - node.hash, - node.left, - node.right, - NULL AS ancestor - FROM node - WHERE node.hash = :root_hash - UNION ALL - SELECT - node.hash, - node.left, - node.right, - tree_from_root_hash.hash AS ancestor - FROM node - JOIN tree_from_root_hash ON node.hash = tree_from_root_hash.left - OR node.hash = tree_from_root_hash.right + if previous_root.node_hash is not None: + previous_internal_nodes: List[InternalNode] = await self.get_internal_nodes( + store_id=store_id, + root_hash=previous_root.node_hash, ) - INSERT OR REPLACE INTO ancestors (hash, ancestor, tree_id, generation) - SELECT - tree_from_root_hash.hash, - tree_from_root_hash.ancestor, - :tree_id, - :generation - FROM tree_from_root_hash - """, - {"root_hash": root.node_hash, "tree_id": store_id, "generation": root.generation}, + known_hashes: Set[bytes32] = {node.hash for node in previous_internal_nodes} + else: + known_hashes = set() + internal_nodes: List[InternalNode] = await self.get_internal_nodes( + store_id=store_id, + root_hash=root.node_hash, ) + for node in internal_nodes: + # We already have the same values in ancestor tables, if we have the same internal node. + # Don't reinsert it so we can save DB space. 
+ if node.hash not in known_hashes: + await self._insert_ancestor_table(node.left_hash, node.right_hash, store_id, root.generation) async def insert_root_with_ancestor_table( self, store_id: bytes32, node_hash: Optional[bytes32], status: Status = Status.PENDING From 44db523f9d11c4123303019ce878f9ab8d47ced6 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Wed, 25 Sep 2024 14:58:30 -0700 Subject: [PATCH 03/69] [CHIA-1486] Update to macOS 13 for build and test (#18640) Update to macOS 13 for build and test --- .github/workflows/build-macos-installers.yml | 8 ++------ .github/workflows/check_wheel_availability.yaml | 2 +- .github/workflows/pre-commit.yml | 2 +- .github/workflows/test-install-scripts.yml | 2 +- .github/workflows/test.yml | 2 +- .github/workflows/upload-pypi-source.yml | 2 +- 6 files changed, 7 insertions(+), 11 deletions(-) diff --git a/.github/workflows/build-macos-installers.yml b/.github/workflows/build-macos-installers.yml index 196d08554ce3..13227c61b736 100644 --- a/.github/workflows/build-macos-installers.yml +++ b/.github/workflows/build-macos-installers.yml @@ -51,7 +51,7 @@ jobs: matrix: python-version: ["3.10"] os: - - runs-on: macos-12 + - runs-on: macos-13 name: intel bladebit-suffix: macos-x86-64.tar.gz arch-artifact-name: intel @@ -83,7 +83,7 @@ jobs: uses: Chia-Network/actions/setjobenv@main env: GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - MACOSX_DEPLOYMENT_TARGET: 12 + MACOSX_DEPLOYMENT_TARGET: 13 - name: Test for secrets access id: check_secrets @@ -296,10 +296,6 @@ jobs: fail-fast: false matrix: os: - - name: 12 - matrix: 12 - runs-on: - intel: macos-12 - name: 13 matrix: 13 runs-on: diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index ca124c4ad03f..b4f84f8d46fc 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -32,7 +32,7 @@ jobs: - name: macOS matrix: macos runs-on: - intel: macos-12 + intel: macos-13 arm: macos-13-arm64 - name: Windows matrix: windows diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index 34676baaa35b..a6a6aad61482 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -29,7 +29,7 @@ jobs: - name: macOS matrix: macos runs-on: - intel: macos-12 + intel: macos-13 arm: macos-13-arm64 - name: Windows matrix: windows diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 7f2c76ee12e2..8ca09d811cd3 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -32,7 +32,7 @@ jobs: os: - runs-on: macos-latest matrix: macos-arm - - runs-on: macos-12 + - runs-on: macos-13 matrix: macos-intel - runs-on: ubuntu-latest matrix: linux diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 11c06aa60534..99b602d5b7c4 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -120,7 +120,7 @@ jobs: concurrency-name: macos-intel configuration: ${{ needs.configure.outputs.configuration }} matrix_mode: ${{ needs.configure.outputs.matrix_mode }} - runs-on: macos-12 + runs-on: macos-13 arch: intel macos-arm: if: github.event_name != 'workflow_dispatch' || inputs.run-macos-arm diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 9ba6139e1352..764f3cbb83b0 100644 --- a/.github/workflows/upload-pypi-source.yml +++ 
b/.github/workflows/upload-pypi-source.yml @@ -43,7 +43,7 @@ jobs: matrix: macos emoji: 🍎 runs-on: - intel: macos-12 + intel: macos-13 arm: macos-13-arm64 - name: Windows matrix: windows From 4383b6f9304abae3b52f4dd514bcd7c99e9e0be5 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 26 Sep 2024 14:15:29 +0100 Subject: [PATCH 04/69] CHIA-1465 Simplify double spend validation in validate_block_body (#18628) Simplify double spend validation in validate_block_body. --- chia/consensus/block_body_validation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index c7f4081dff6b..bb42e87434db 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -367,8 +367,8 @@ async def validate_block_body( # 14. Check for duplicate spends inside block removal_counter = collections.Counter(removals) - for k, v in removal_counter.items(): - if v > 1: + for count in removal_counter.values(): + if count > 1: return Err.DOUBLE_SPEND, None # 15. Check if removals exist and were not previously spent. (unspent_db + diff_store + this_block) From 0764c8da336f7180dca36a76019e2efa9414aee2 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Thu, 26 Sep 2024 17:55:26 +0200 Subject: [PATCH 05/69] Bump chia rs 0.14.0 (#18643) * bump chia_rs to 0.14.0 * update tests covering negative division (allowed since the hard fork) and infinity G1 keys (disallowed with soft fork 5) * BLSCache.update() takes GTElement now * use get_conditions_from_spendbundle() in test_mempool_manager, to get the ANALYZE_SPENDS behavior * fix mypy warnings --- chia/_tests/clvm/test_program.py | 24 ++++---- chia/_tests/core/full_node/test_conditions.py | 5 +- .../core/mempool/test_mempool_manager.py | 15 ++--- chia/_tests/wallet/test_signer_protocol.py | 4 +- chia/consensus/multiprocess_validation.py | 5 +- chia/full_node/mempool_manager.py | 2 +- chia/types/blockchain_format/program.py | 12 +--- poetry.lock | 59 ++++++++++--------- pyproject.toml | 2 +- 9 files changed, 61 insertions(+), 67 deletions(-) diff --git a/chia/_tests/clvm/test_program.py b/chia/_tests/clvm/test_program.py index 4e961e4376f4..447f67a91bc9 100644 --- a/chia/_tests/clvm/test_program.py +++ b/chia/_tests/clvm/test_program.py @@ -1,7 +1,6 @@ from __future__ import annotations import pytest -from chia_rs import ENABLE_FIXED_DIV from clvm.EvalError import EvalError from clvm.operators import KEYWORD_TO_ATOM from clvm_tools.binutils import assemble, disassemble @@ -120,27 +119,32 @@ def test_run() -> None: assert ret.atom == bytes([0xFE]) # run() - with pytest.raises(ValueError, match="div operator with negative operands is deprecated"): - cost, ret = div.run_with_cost(100000, [10, -5], 0) + cost, ret = div.run_with_cost(100000, [10, -5], 0) + assert cost == 1107 + print(ret) + assert ret.atom == bytes([0xFE]) - cost, ret = div.run_with_cost(100000, [10, -5], ENABLE_FIXED_DIV) + cost, ret = div.run_with_cost(100000, [10, -5], 0) assert cost == 1107 print(ret) assert ret.atom == bytes([0xFE]) # run_with_flags() - with pytest.raises(ValueError, match="div operator with negative operands is deprecated"): - cost, ret = div.run_with_flags(100000, 0, [10, -5]) + cost, ret = div.run_with_flags(100000, 0, [10, -5]) + assert cost == 1107 + print(ret) + assert ret.atom == bytes([0xFE]) - cost, ret = div.run_with_flags(100000, ENABLE_FIXED_DIV, [10, -5]) + cost, ret = div.run_with_flags(100000, 0, [10, -5]) assert cost == 1107 print(ret) assert 
ret.atom == bytes([0xFE]) # run_with_cost() - with pytest.raises(ValueError, match="div operator with negative operands is deprecated"): - ret = div.run([10, -5], 100000, 0) + ret = div.run([10, -5], 100000, 0) + print(ret) + assert ret.atom == bytes([0xFE]) - ret = div.run([10, -5], 100000, ENABLE_FIXED_DIV) + ret = div.run([10, -5], 100000, 0) print(ret) assert ret.atom == bytes([0xFE]) diff --git a/chia/_tests/core/full_node/test_conditions.py b/chia/_tests/core/full_node/test_conditions.py index f5b25ba43605..72a7fc4af2e8 100644 --- a/chia/_tests/core/full_node/test_conditions.py +++ b/chia/_tests/core/full_node/test_conditions.py @@ -480,10 +480,7 @@ async def test_agg_sig_infinity( ) # infinity is disallowed after soft-fork-5 activates - if consensus_mode >= ConsensusMode.SOFT_FORK_5: - expected_error = Err.INVALID_CONDITION - else: - expected_error = None + expected_error = Err.INVALID_CONDITION await check_conditions(bt, conditions, expected_error) @pytest.mark.anyio diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index 582c536049fb..4d45dc77d01f 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -5,7 +5,7 @@ from typing import Any, Awaitable, Callable, Collection, Dict, List, Optional, Set, Tuple import pytest -from chia_rs import ELIGIBLE_FOR_DEDUP, ELIGIBLE_FOR_FF, AugSchemeMPL, G2Element +from chia_rs import ELIGIBLE_FOR_DEDUP, ELIGIBLE_FOR_FF, AugSchemeMPL, G2Element, get_conditions_from_spendbundle from chiabip158 import PyBIP158 from chia._tests.conftest import ConsensusMode @@ -13,9 +13,8 @@ from chia._tests.util.setup_nodes import OldSimulatorsAndWallets, setup_simulators_and_wallets from chia.consensus.constants import ConsensusConstants from chia.consensus.default_constants import DEFAULT_CONSTANTS -from chia.full_node.bundle_tools import simple_solution_generator from chia.full_node.mempool import MAX_SKIPPED_ITEMS, PRIORITY_TX_THRESHOLD -from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, mempool_check_time_locks +from chia.full_node.mempool_check_conditions import mempool_check_time_locks from chia.full_node.mempool_manager import ( MEMPOOL_MIN_FEE_INCREASE, QUOTE_BYTES, @@ -442,16 +441,12 @@ def make_bundle_spends_map_and_fee( def mempool_item_from_spendbundle(spend_bundle: SpendBundle) -> MempoolItem: - generator = simple_solution_generator(spend_bundle) - npc_result = get_name_puzzle_conditions( - generator=generator, max_cost=INFINITE_COST, mempool_mode=True, height=uint32(0), constants=DEFAULT_CONSTANTS - ) - assert npc_result.conds is not None - bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, npc_result.conds) + conds = get_conditions_from_spendbundle(spend_bundle, INFINITE_COST, DEFAULT_CONSTANTS, uint32(0)) + bundle_coin_spends, fee = make_bundle_spends_map_and_fee(spend_bundle, conds) return MempoolItem( spend_bundle=spend_bundle, fee=fee, - conds=npc_result.conds, + conds=conds, spend_bundle_name=spend_bundle.name(), height_added_to_mempool=TEST_HEIGHT, bundle_coin_spends=bundle_coin_spends, diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index c1b641748c44..5896d02748bc 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -152,7 +152,9 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram ) assert derivation_record is not None pubkey: 
G1Element = derivation_record.pubkey - synthetic_pubkey: G1Element = G1Element.from_bytes(puzzle.uncurry()[1].at("f").atom) + atom = puzzle.uncurry()[1].at("f").atom + assert atom is not None + synthetic_pubkey: G1Element = G1Element.from_bytes(atom) message: bytes = delegated_puzzle_hash + coin.name() + wallet_state_manager.constants.AGG_SIG_ME_ADDITIONAL_DATA utx: UnsignedTransaction = UnsignedTransaction( diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 8378bc89ff5e..9a9183607b98 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -106,8 +106,9 @@ def batch_pre_validate_blocks( header_block = get_block_header(block, tx_additions, removals) prev_ses_block = None if prev_ses_block_bytes is not None and len(prev_ses_block_bytes) > 0: - if prev_ses_block_bytes[i] is not None: - prev_ses_block = BlockRecord.from_bytes_unchecked(prev_ses_block_bytes[i]) + buffer = prev_ses_block_bytes[i] + if buffer is not None: + prev_ses_block = BlockRecord.from_bytes_unchecked(buffer) required_iters, error = validate_finished_header_block( constants, BlockCache(blocks), diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 4387e89e746e..a80fa3fc5b14 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -301,7 +301,7 @@ async def pre_validate_spendbundle( self._worker_queue_size -= 1 if bls_cache is not None: - bls_cache.update([(e[0], bytes(e[1])) for e in new_cache_entries]) + bls_cache.update(new_cache_entries) ret = NPCResult(None, sbc) diff --git a/chia/types/blockchain_format/program.py b/chia/types/blockchain_format/program.py index 86e878e8273f..e1df72fdc1b0 100644 --- a/chia/types/blockchain_format/program.py +++ b/chia/types/blockchain_format/program.py @@ -3,15 +3,7 @@ import io from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Set, Tuple, Type, TypeVar -from chia_rs import ( - ALLOW_BACKREFS, - DISALLOW_INFINITY_G1, - ENABLE_BLS_OPS_OUTSIDE_GUARD, - ENABLE_FIXED_DIV, - MEMPOOL_MODE, - run_chia_program, - tree_hash, -) +from chia_rs import ALLOW_BACKREFS, MEMPOOL_MODE, run_chia_program, tree_hash from clvm.casts import int_from_bytes from clvm.CLVMObject import CLVMStorage from clvm.EvalError import EvalError @@ -26,7 +18,7 @@ INFINITE_COST = 11000000000 -DEFAULT_FLAGS = ENABLE_BLS_OPS_OUTSIDE_GUARD | ENABLE_FIXED_DIV | DISALLOW_INFINITY_G1 | MEMPOOL_MODE +DEFAULT_FLAGS = MEMPOOL_MODE T_CLVMStorage = TypeVar("T_CLVMStorage", bound=CLVMStorage) T_Program = TypeVar("T_Program", bound="Program") diff --git a/poetry.lock b/poetry.lock index 27b7dd40208d..ffdc7c02e027 100644 --- a/poetry.lock +++ b/poetry.lock @@ -796,39 +796,42 @@ dev = ["black (>=23.1.0)", "pytest (>=7.2.1)", "ruff (>=0.0.252)"] [[package]] name = "chia-rs" -version = "0.13.0" +version = "0.14.0" description = "Code useful for implementing chia consensus." 
optional = false python-versions = "*" files = [ - {file = "chia_rs-0.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4c9936c8188c27aca29f46ca9cade14126f130ac645ba661959f1b54d04b292"}, - {file = "chia_rs-0.13.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:278268c413617c15db697d9530f78347eeae02ed021fc4c8f367cfaf89d8fb79"}, - {file = "chia_rs-0.13.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:f1c4207205274744ad9215f0725a7938f87c51834b4e90e2c3604c5ca7a6ce0a"}, - {file = "chia_rs-0.13.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:9c732be4524d54ff4d20b4df1efdc7ae248ef21088dbafc6b7037317e982779d"}, - {file = "chia_rs-0.13.0-cp310-none-win_amd64.whl", hash = "sha256:02df1cbfcbd47a248083e3d6072cf84b4aae21b785edfd1c6861111c8b6af3e5"}, - {file = "chia_rs-0.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b0ceced7dd5484bef18bad3a3214cc16ea8e5928d1f67dafe0b2d186db58f13"}, - {file = "chia_rs-0.13.0-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:c23be0ca119bfae779e3a26fda9ec0f2943aa561f23d390d6ee43cf6d8779964"}, - {file = "chia_rs-0.13.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dbe55c44c9ebdd780f5cf3b682323fcc7fce233b999d2aa1f835fb19c488e97c"}, - {file = "chia_rs-0.13.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:0d75fbb787c5fdc52493e17e3913d70fcf7d00d5658bc06143c789480398e1f7"}, - {file = "chia_rs-0.13.0-cp311-none-win_amd64.whl", hash = "sha256:e23d81e3596355a8a0da9fa972e8487c3e964e128a5d5ed9693badd7d532b6f7"}, - {file = "chia_rs-0.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:732d0f26b2567f3743375085562bd0b2940b65e5238d3d05d925dd1e150dd5e1"}, - {file = "chia_rs-0.13.0-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:cc78b5b22f6d0eaaa33f1df8acf73b5ae21c752ee83c6a4e4f6d4d16a1cf13d4"}, - {file = "chia_rs-0.13.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d94cec83023e7e56add959a09e6623cc611bd6a2fc875332339bb22d196305a4"}, - {file = "chia_rs-0.13.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d977f682b8c8844b1fba25d2e3fe597297f59388415c383ca358d7b3e109df8e"}, - {file = "chia_rs-0.13.0-cp312-none-win_amd64.whl", hash = "sha256:1f030209a990e3b6d0a2c4533f6e2bd6d492567dc8f744700e6aaca6272b0bf0"}, - {file = "chia_rs-0.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a4178d41cce7191fb8171abea838b2382e77ccbe6b888a2dd3fb6f2604f75140"}, - {file = "chia_rs-0.13.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:f7e4a5c64dfef763e624f843f642207c6005c8aac70795e826e4b804f84a6188"}, - {file = "chia_rs-0.13.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2302b83f470677aa96aa920497a7a550100772b65bb8b4e3997878819eee05ad"}, - {file = "chia_rs-0.13.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:887385eacfbade82678639e410aaa0590b6a9a41a27c1e507fb845c035d2e807"}, - {file = "chia_rs-0.13.0-cp38-none-win_amd64.whl", hash = "sha256:d478fc1ca2a6a43080014124aeb689ff402553ef2680f6cbe7e2d25535913893"}, - {file = "chia_rs-0.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1e3755e1f94b647e358bf3d4fab2f08b993008b0b516351342202b606c652467"}, - {file = "chia_rs-0.13.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:3b3b09a3e7efe5b35543955c4d57de03f09b66930500b46ce7f05b040592f304"}, - {file = "chia_rs-0.13.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:7d38729365a2df3aaed87a021351ec090d7986854c8e3975693df4b562a6721a"}, - {file = "chia_rs-0.13.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:77bdae5d408dc48f636f0c3b0cda7ac91cae98efec3ff1806a10855ae2068584"}, - {file = 
"chia_rs-0.13.0-cp39-none-win_amd64.whl", hash = "sha256:b1d76328b68e97f6bcad985783021370a1986fbb3e5e1c13051ba2a8761cb2e2"}, - {file = "chia_rs-0.13.0.tar.gz", hash = "sha256:c95108b0c9137a5920310aeae58ec609e4edbc34d4a216a00d0570c4ef797881"}, + {file = "chia_rs-0.14.0-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:defa14a8a9532d2d0eb3b6b263ce6ad2c2c3ac5b37ff49e42a4202b1855d6ce9"}, + {file = "chia_rs-0.14.0-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:74724d50d18f48d3643e10308ab6b1ad98dbd47a136a9b293a4c985d91069b21"}, + {file = "chia_rs-0.14.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:dc1052c718dc930997b4ef50478d24973dad2b518ba0634347f7815b5b8f6643"}, + {file = "chia_rs-0.14.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0aee2574d24c5db06a74cb0d69949f03575cdf33a7e7a8673cdab298bdf491a8"}, + {file = "chia_rs-0.14.0-cp310-none-win_amd64.whl", hash = "sha256:291a3821951c3505e1172c772ee329f75fe49961a52952d57fdd49eddf8ad22a"}, + {file = "chia_rs-0.14.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4020b1086a8ab26aeee39be71c87b6e8c16481ce75eb82200d394f762ddbbc0b"}, + {file = "chia_rs-0.14.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9e9e9f43259e7a8281a3a731f42bc14b2bf91bc2d3ef51cd5c49b1cefb6e2389"}, + {file = "chia_rs-0.14.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a87faa328af72e105e3bf02f276e225aabcba4748c392555905bc8be211ef6d1"}, + {file = "chia_rs-0.14.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:138c0f42d522a97a9486440ecdd943dcd58b38b96d4830f4fe6f00413dcfadf1"}, + {file = "chia_rs-0.14.0-cp311-none-win_amd64.whl", hash = "sha256:4b6265ebe1349bfc743db19a2a9c33fc79e97826f2acfe26554375cd929628c8"}, + {file = "chia_rs-0.14.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:740d4ac6222e82fc0dc2fddc04148d0504b383ee68f3ae094f91bc9a2936d20d"}, + {file = "chia_rs-0.14.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:e0757077264605cdaa7e0f49b95fc8c075808348cd640e30ce9c40132b107d42"}, + {file = "chia_rs-0.14.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:49c282441e23c089aa94d33b1a24d1324383aedb5e20af9b42d6e87a4f26ec1f"}, + {file = "chia_rs-0.14.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c247aef6154194670338ad2e95783dadc5a82b5f671edb3c9314dd95505553a4"}, + {file = "chia_rs-0.14.0-cp312-none-win_amd64.whl", hash = "sha256:75a51561e3bd375884853492e7a8f41162694593f39deb1d2757f98795d311aa"}, + {file = "chia_rs-0.14.0-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:40873da635ea0a253e006eb427f5823b2123ed9045bf0a548902035b0c7bd214"}, + {file = "chia_rs-0.14.0-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:fcb4fe4ebcaac87780c54a7fac12dea3dcd142c061c6b4d3e38e303c7e18857a"}, + {file = "chia_rs-0.14.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:636ba7435aa7f114f0cbf687c2ac7ea868f98c47c8c1b5e7894a1fbc8197d8d3"}, + {file = "chia_rs-0.14.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:db45d48d55554933d71bad7169aa3ea2c2d99d4bd8e37e43e7f84b0fdd5b97a5"}, + {file = "chia_rs-0.14.0-cp38-none-win_amd64.whl", hash = "sha256:5e813775655a41990dc6e9ef4f66c958aa11c0bc43b7a7e68c99c392aab9f560"}, + {file = "chia_rs-0.14.0-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4667bcb01fa2ffcaea02f6e9c9f492319abdd4c0133ab7c65e3601d8d70bfe9b"}, + {file = "chia_rs-0.14.0-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:3ac5861cc1a5093ecea80dbfc6bf152a8cc44610707a0ad4a88fea5c2b019e28"}, + {file = "chia_rs-0.14.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:faca2e80513eaef000663384f1abec39caed642dc5812729550448067322b1f9"}, + {file = "chia_rs-0.14.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:892623e6df27c41e344431bf2f4440f46aacc4a4aa48aff2728b144e6f6a270b"}, + {file = "chia_rs-0.14.0-cp39-none-win_amd64.whl", hash = "sha256:a03362e6283d0fc1bc5063db666dd75da7fd0e52df32eb5a68095e0564bae4ee"}, + {file = "chia_rs-0.14.0.tar.gz", hash = "sha256:6652e7c328e42b31e9be8e985c1bfc1ddcd83cf31e6b5eb9c0a31a641411677b"}, ] +[package.dependencies] +typing-extensions = "*" + [[package]] name = "chiabip158" version = "1.5.1" @@ -3454,4 +3457,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "f618941f26166db2b1aab5526574e1cb3a7c6fee2cfbcd0ce8b602773b0967b5" +content-hash = "0d1bba535311a5b058758bcae6a8f720de7c31e9688ed5388b96a1edbb2acb9d" diff --git a/pyproject.toml b/pyproject.toml index 37560b6efccf..178da24bffe9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ bitstring = "4.1.4" # Binary data management library boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space -chia_rs = "0.13.0" +chia_rs = "0.14.0" chiavdf = "1.1.4" # timelord and vdf verification click = "8.1.7" # For the CLI clvm = "0.9.10" From 439cd07590d3a3fab938ac69f0ce6cfe2cc03503 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 26 Sep 2024 08:55:36 -0700 Subject: [PATCH 06/69] CA Cert updates (#18627) adding ca updates Co-authored-by: ChiaAutomation --- mozilla-ca | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mozilla-ca b/mozilla-ca index 7da6b4833244..0aecf4ed7c6f 160000 --- a/mozilla-ca +++ b/mozilla-ca @@ -1 +1 @@ -Subproject commit 7da6b48332442b0936ccd2bea649ccba449b9d8b +Subproject commit 0aecf4ed7c6f2b20a89d3d3386b866c1a3f03139 From 0059b5e05267ae51823bd3a5cb26260f96ecf298 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Thu, 26 Sep 2024 21:04:01 +0200 Subject: [PATCH 07/69] [CHIA-1087] Simplify batch pre validate blocks (#18602) * minor simplification of stacked if-conditions and early exits on failure paths * Simplify NPCResult -> SpendBundleConditions * make include_spends() take SpendBundleConditions, rather than NPCResult --- chia/consensus/block_body_validation.py | 9 ++- chia/consensus/blockchain.py | 6 +- chia/consensus/multiprocess_validation.py | 80 ++++++++++++----------- 3 files changed, 48 insertions(+), 47 deletions(-) diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index bb42e87434db..1ef2908877c5 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -5,7 +5,7 @@ from dataclasses import dataclass, field from typing import Awaitable, Callable, Collection, Dict, List, Optional, Set, Tuple, Union -from chia_rs import AugSchemeMPL, BLSCache, G1Element +from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions from chiabip158 import PyBIP158 from chia.consensus.block_record import BlockRecord @@ -85,7 +85,7 @@ def reset(self, fork_height: int, header_hash: bytes32) -> None: self.removals_since_fork = {} self.block_hashes = [] - def include_spends(self, npc_result: Optional[NPCResult], block: FullBlock, header_hash: bytes32) -> None: + def include_spends(self, conds: Optional[SpendBundleConditions], block: FullBlock, header_hash: bytes32) -> None: height = block.height assert self.peak_height == 
height - 1 @@ -97,11 +97,10 @@ def include_spends(self, npc_result: Optional[NPCResult], block: FullBlock, head self.peak_height = int(block.height) self.peak_hash = header_hash - if npc_result is not None: - assert npc_result.conds is not None + if conds is not None: assert block.foliage_transaction_block is not None timestamp = block.foliage_transaction_block.timestamp - for spend in npc_result.conds.spends: + for spend in conds.spends: self.removals_since_fork[bytes32(spend.coin_id)] = ForkRem(bytes32(spend.puzzle_hash), height) for puzzle_hash, amount, hint in spend.create_coin: coin = Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(amount)) diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index e9aa020bbda5..8f4f5a3b22ea 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -282,7 +282,7 @@ async def run_single_block(self, block: FullBlock, fork_info: ForkInfo) -> None: ) assert npc.error is None - fork_info.include_spends(npc, block, block.header_hash) + fork_info.include_spends(None if npc is None else npc.conds, block, block.header_hash) async def add_block( self, @@ -412,7 +412,7 @@ async def add_block( # main chain, we still need to re-run it to update the additions and # removals in fork_info. await self.advance_fork_info(block, fork_info) - fork_info.include_spends(npc_result, block, header_hash) + fork_info.include_spends(None if npc_result is None else npc_result.conds, block, header_hash) self.add_block_record(block_rec) return AddBlockResult.ALREADY_HAVE_BLOCK, None, None @@ -444,7 +444,7 @@ async def add_block( # case we're validating blocks on a fork, the next block validation will # need to know of these additions and removals. Also, _reconsider_peak() # will need these results - fork_info.include_spends(npc_result, block, header_hash) + fork_info.include_spends(None if npc_result is None else npc_result.conds, block, header_hash) # block_to_block_record() require the previous block in the cache if not genesis and prev_block is not None: diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 9a9183607b98..dc80c91016bb 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -8,7 +8,7 @@ from dataclasses import dataclass from typing import Dict, List, Optional, Sequence, Tuple -from chia_rs import AugSchemeMPL +from chia_rs import AugSchemeMPL, SpendBundleConditions from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord @@ -53,7 +53,7 @@ def batch_pre_validate_blocks( blocks_pickled: Dict[bytes, bytes], full_blocks_pickled: List[bytes], prev_transaction_generators: List[Optional[List[bytes]]], - npc_results: Dict[uint32, bytes], + conditions: Dict[uint32, bytes], expected_difficulty: List[uint64], expected_sub_slot_iters: List[uint64], validate_signatures: bool, @@ -71,16 +71,14 @@ def batch_pre_validate_blocks( block: FullBlock = FullBlock.from_bytes_unchecked(full_blocks_pickled[i]) tx_additions: List[Coin] = [] removals: List[bytes32] = [] - npc_result: Optional[NPCResult] = None - if block.height in npc_results: - npc_result = NPCResult.from_bytes(npc_results[block.height]) - assert npc_result is not None - if npc_result.conds is not None: - removals, tx_additions = tx_removals_and_additions(npc_result.conds) - else: - removals, tx_additions = [], [] - - if block.transactions_generator is not None and npc_result is None: + conds: 
Optional[SpendBundleConditions] = None
+        if block.height in conditions:
+            conds = SpendBundleConditions.from_bytes(conditions[block.height])
+            removals, tx_additions = tx_removals_and_additions(conds)
+        elif block.transactions_generator is not None:
+            # TODO: this function would be simpler if conditions were
+            # required to be passed in for all transaction blocks. We would
+            # no longer need prev_transaction_generators
             prev_generators = prev_transaction_generators[i]
             assert prev_generators is not None
             assert block.transactions_info is not None
@@ -93,15 +91,17 @@ def batch_pre_validate_blocks(
                 height=block.height,
                 constants=constants,
             )
-            removals, tx_additions = tx_removals_and_additions(npc_result.conds)
-        if npc_result is not None and npc_result.error is not None:
-            validation_time = time.monotonic() - validation_start
-            results.append(
-                PreValidationResult(
-                    uint16(npc_result.error), None, npc_result, False, uint32(validation_time * 1000)
+            if npc_result.error is not None:
+                validation_time = time.monotonic() - validation_start
+                results.append(
+                    PreValidationResult(
+                        uint16(npc_result.error), None, npc_result, False, uint32(validation_time * 1000)
+                    )
                 )
-            )
-            continue
+                continue
+            assert npc_result.conds is not None
+            conds = npc_result.conds
+            removals, tx_additions = tx_removals_and_additions(conds)

             header_block = get_block_header(block, tx_additions, removals)
             prev_ses_block = None
@@ -123,28 +123,28 @@ def batch_pre_validate_blocks(
                 error_int = uint16(error.code.value)

             successfully_validated_signatures = False
-            # If we failed CLVM, no need to validate signature, the block is already invalid
-            if error_int is None:
-                # If this is False, it means either we don't have a signature (not a tx block) or we have an invalid
-                # signature (which also puts in an error) or we didn't validate the signature because we want to
-                # validate it later. add_block will attempt to validate the signature later.
-                if validate_signatures:
-                    if npc_result is not None and block.transactions_info is not None:
-                        assert npc_result.conds
-                        pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA)
-                        if not AugSchemeMPL.aggregate_verify(
-                            pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature
-                        ):
-                            error_int = uint16(Err.BAD_AGGREGATE_SIGNATURE.value)
-                        else:
-                            successfully_validated_signatures = True
+            # If we failed header block validation, there is no need to validate
+            # the signature; the block is already invalid. If this is False, it means
+            # either we don't have a signature (not a tx block) or we have an
+            # invalid signature (which also puts in an error) or we didn't
+            # validate the signature because we want to validate it later.
+            # add_block will attempt to validate the signature later.
+ if error_int is None and validate_signatures and conds is not None: + assert block.transactions_info is not None + pairs_pks, pairs_msgs = pkm_pairs(conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) + if not AugSchemeMPL.aggregate_verify( + pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature + ): + error_int = uint16(Err.BAD_AGGREGATE_SIGNATURE.value) + else: + successfully_validated_signatures = True validation_time = time.monotonic() - validation_start results.append( PreValidationResult( error_int, required_iters, - npc_result, + None if conds is None else NPCResult(None, conds), successfully_validated_signatures, uint32(validation_time * 1000), ) @@ -274,9 +274,11 @@ async def pre_validate_blocks_multiprocessing( if block_rec.sub_epoch_summary_included is not None: prev_ses_block = block_rec - npc_results_pickled = {} + conditions_pickled = {} for k, v in npc_results.items(): - npc_results_pickled[k] = bytes(v) + assert v.error is None + assert v.conds is not None + conditions_pickled[k] = bytes(v.conds) futures = [] # Pool of workers to validate blocks concurrently recent_blocks_bytes = {bytes(k): bytes(v) for k, v in recent_blocks.items()} # convert to bytes @@ -321,7 +323,7 @@ async def pre_validate_blocks_multiprocessing( recent_blocks_bytes, b_pickled, previous_generators, - npc_results_pickled, + conditions_pickled, [diff_ssis[j][0] for j in range(i, end_i)], [diff_ssis[j][1] for j in range(i, end_i)], validate_signatures, From b7fbc6b2350a2ab8717ecbad266f052006db85b7 Mon Sep 17 00:00:00 2001 From: Izumi Hoshino Date: Fri, 27 Sep 2024 06:11:58 +0900 Subject: [PATCH 08/69] Updated GUI pin for 2.4.4 (#18607) Updated GUI pin --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index d92ed1db65b5..fa2a9969f2c7 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit d92ed1db65b5dbd2231f09aec5ecd759c49fa74d +Subproject commit fa2a9969f2c78b9f69f8bf9b6fc25c013a505520 From 3ca9b5ee91159d1e7657883385f760036e32b686 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 27 Sep 2024 13:00:33 -0400 Subject: [PATCH 09/69] remove no-wallet-found traceback from `/dl_owned_singletons` (#18632) --- chia/rpc/wallet_rpc_api.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 4247d056ac31..7bb2f2749481 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -4416,7 +4416,11 @@ async def dl_owned_singletons(self, request: Dict[str, Any]) -> EndpointResult: if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") - wallet = self.service.wallet_state_manager.get_dl_wallet() + try: + wallet = self.service.wallet_state_manager.get_dl_wallet() + except ValueError: + return {"success": False, "error": "no DataLayer wallet available"} + singletons = await wallet.get_owned_singletons() singletons_json = [singleton.to_json_dict() for singleton in singletons] From c0c12b90c668e7912d5490b50a0e4ce32292069a Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Fri, 27 Sep 2024 10:02:29 -0700 Subject: [PATCH 10/69] [CHIA-1437]: Reduce level log spam when downloading DAT files (#18610) * Reduce level log spam when downloading files * black fix * Just change info->debug --- chia/data_layer/download_data.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/chia/data_layer/download_data.py b/chia/data_layer/download_data.py index 066313656c3f..1e0e4dae5a1e 100644 --- a/chia/data_layer/download_data.py +++ b/chia/data_layer/download_data.py @@ -377,4 +377,4 @@ async def http_download( new_percentage = f"{progress_byte / size:.0%}" if new_percentage != progress_percentage: progress_percentage = new_percentage - log.info(f"Downloading delta file {filename}. {progress_percentage} of {size} bytes.") + log.debug(f"Downloading delta file {filename}. {progress_percentage} of {size} bytes.") From 8552e35e1421ed4f2222198ab9c94216358c1a3c Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 30 Sep 2024 17:32:26 +0100 Subject: [PATCH 11/69] CHIA-1491 Annotate test_wallet_interested_store.py (#18644) Annotate test_wallet_interested_store.py. --- chia/_tests/wallet/test_wallet_interested_store.py | 2 +- mypy-exclusions.txt | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/chia/_tests/wallet/test_wallet_interested_store.py b/chia/_tests/wallet/test_wallet_interested_store.py index 942a18546af0..49e305286256 100644 --- a/chia/_tests/wallet/test_wallet_interested_store.py +++ b/chia/_tests/wallet/test_wallet_interested_store.py @@ -12,7 +12,7 @@ @pytest.mark.anyio -async def test_store(seeded_random: random.Random): +async def test_store(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: store = await WalletInterestedStore.create(db_wrapper) coin_1 = Coin(bytes32.random(seeded_random), bytes32.random(seeded_random), uint64(12312)) diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt index 558ab288ffb2..61de8eac8919 100644 --- a/mypy-exclusions.txt +++ b/mypy-exclusions.txt @@ -94,6 +94,5 @@ chia._tests.util.time_out_assert chia._tests.wallet.did_wallet.test_did chia._tests.wallet.rpc.test_wallet_rpc chia._tests.wallet.test_taproot -chia._tests.wallet.test_wallet_interested_store tools.analyze-chain tools.run_block From 33e79e3668aa3be38e2dc1a83155b43bf515c7a9 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 30 Sep 2024 17:32:37 +0100 Subject: [PATCH 12/69] CHIA-1485 Simplify calling pre_validate_spendbundle and avoid spend vs spend bundle confusion (#18639) Simplify calling pre_validate_spendbundle and avoid spend vs spend bundle confusion. 
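

A minimal before/after sketch of the call-site change (assuming `sb` is a
SpendBundle and `mempool_manager` is a MempoolManager, as in the tests touched
below):

    # before: every caller computed and passed the bundle id itself
    conds = await mempool_manager.pre_validate_spendbundle(sb, sb.name())

    # after: the id is optional and derived from the bundle when omitted
    conds = await mempool_manager.pre_validate_spendbundle(sb)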
--- .../core/mempool/test_mempool_manager.py | 24 +++++++++---------- chia/full_node/mempool_manager.py | 14 +++++------ chia/rpc/full_node_rpc_api.py | 7 ++---- 3 files changed, 21 insertions(+), 24 deletions(-) diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py index 4d45dc77d01f..4d5ac4677679 100644 --- a/chia/_tests/core/mempool/test_mempool_manager.py +++ b/chia/_tests/core/mempool/test_mempool_manager.py @@ -458,7 +458,7 @@ async def test_empty_spend_bundle() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) sb = SpendBundle([], G2Element()) with pytest.raises(ValidationError, match="INVALID_SPEND_BUNDLE"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -467,7 +467,7 @@ async def test_negative_addition_amount() -> None: conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, -1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="COIN_AMOUNT_NEGATIVE"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -478,7 +478,7 @@ async def test_valid_addition_amount() -> None: coin = Coin(IDENTITY_PUZZLE_HASH, IDENTITY_PUZZLE_HASH, max_amount) sb = spend_bundle_from_conditions(conditions, coin) # ensure this does not throw - _ = await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + _ = await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -488,7 +488,7 @@ async def test_too_big_addition_amount() -> None: conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, max_amount + 1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="COIN_AMOUNT_EXCEEDS_MAXIMUM"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -500,7 +500,7 @@ async def test_duplicate_output() -> None: ] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="DUPLICATE_OUTPUT"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -511,7 +511,7 @@ async def test_block_cost_exceeds_max() -> None: conditions.append([ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, i]) sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="BLOCK_COST_EXCEEDS_MAX"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -519,9 +519,9 @@ async def test_double_spend_prevalidation() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 1]] sb = spend_bundle_from_conditions(conditions) - sb_twice: SpendBundle = SpendBundle.aggregate([sb, sb]) + sb_twice = SpendBundle.aggregate([sb, sb]) with pytest.raises(ValidationError, match="DOUBLE_SPEND"): - await mempool_manager.pre_validate_spendbundle(sb_twice, sb_twice.name()) + await mempool_manager.pre_validate_spendbundle(sb_twice) @pytest.mark.anyio @@ -529,11 +529,11 @@ async def test_minting_coin() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, TEST_COIN_AMOUNT]] sb = 
spend_bundle_from_conditions(conditions) - _ = await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + _ = await mempool_manager.pre_validate_spendbundle(sb) conditions = [[ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, TEST_COIN_AMOUNT + 1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="MINTING_COIN"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio @@ -541,11 +541,11 @@ async def test_reserve_fee_condition() -> None: mempool_manager = await instantiate_mempool_manager(zero_calls_get_coin_records) conditions = [[ConditionOpcode.RESERVE_FEE, TEST_COIN_AMOUNT]] sb = spend_bundle_from_conditions(conditions) - _ = await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + _ = await mempool_manager.pre_validate_spendbundle(sb) conditions = [[ConditionOpcode.RESERVE_FEE, TEST_COIN_AMOUNT + 1]] sb = spend_bundle_from_conditions(conditions) with pytest.raises(ValidationError, match="RESERVE_FEE_CONDITION_FAILED"): - await mempool_manager.pre_validate_spendbundle(sb, sb.name()) + await mempool_manager.pre_validate_spendbundle(sb) @pytest.mark.anyio diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index a80fa3fc5b14..51af98ffa6e5 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -264,17 +264,14 @@ def remove_seen(self, bundle_hash: bytes32) -> None: self.seen_bundle_hashes.pop(bundle_hash) async def pre_validate_spendbundle( - self, - new_spend: SpendBundle, - spend_name: bytes32, - bls_cache: Optional[BLSCache] = None, + self, spend_bundle: SpendBundle, spend_bundle_id: Optional[bytes32] = None, bls_cache: Optional[BLSCache] = None ) -> SpendBundleConditions: """ Errors are included within the cached_result. 
This runs in another process so we don't block the main thread """ - if new_spend.coin_spends == []: + if spend_bundle.coin_spends == []: raise ValidationError(Err.INVALID_SPEND_BUNDLE, "Empty SpendBundle") assert self.peak is not None @@ -284,7 +281,7 @@ async def pre_validate_spendbundle( sbc, new_cache_entries, duration = await asyncio.get_running_loop().run_in_executor( self.pool, validate_clvm_and_signature, - new_spend, + spend_bundle, self.max_tx_clvm_cost, self.constants, self.peak.height, @@ -305,10 +302,13 @@ async def pre_validate_spendbundle( ret = NPCResult(None, sbc) + if spend_bundle_id is None: + spend_bundle_id = spend_bundle.name() + log.log( logging.DEBUG if duration < 2 else logging.WARNING, f"pre_validate_spendbundle took {duration:0.4f} seconds " - f"for {spend_name} (queue-size: {self._worker_queue_size})", + f"for {spend_bundle_id} (queue-size: {self._worker_queue_size})", ) if ret.error is not None: raise ValidationError(Err(ret.error), "pre_validate_spendbundle failed") diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py index faba0a43b4aa..976c4f28d628 100644 --- a/chia/rpc/full_node_rpc_api.py +++ b/chia/rpc/full_node_rpc_api.py @@ -859,11 +859,8 @@ async def _validate_fee_estimate_cost(self, request: Dict[str, Any]) -> uint64: raise ValueError(f"Request must contain exactly one of {ns}") if "spend_bundle" in request: - spend_bundle: SpendBundle = SpendBundle.from_json_dict(request["spend_bundle"]) - spend_name = spend_bundle.name() - conds: SpendBundleConditions = await self.service.mempool_manager.pre_validate_spendbundle( - spend_bundle, spend_name - ) + spend_bundle = SpendBundle.from_json_dict(request["spend_bundle"]) + conds: SpendBundleConditions = await self.service.mempool_manager.pre_validate_spendbundle(spend_bundle) cost = conds.cost elif "cost" in request: cost = request["cost"] From cc554eac9079f86dfd52a5dd8e85eeae83311fdc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:32:48 -0700 Subject: [PATCH 13/69] build(deps): bump chiavdf from 1.1.4 to 1.1.6 (#18634) Bumps [chiavdf](https://github.com/Chia-Network/chiavdf) from 1.1.4 to 1.1.6. - [Release notes](https://github.com/Chia-Network/chiavdf/releases) - [Commits](https://github.com/Chia-Network/chiavdf/compare/1.1.4...1.1.6) --- updated-dependencies: - dependency-name: chiavdf dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 56 +++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index ffdc7c02e027..d9fd48e6e08e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -930,37 +930,37 @@ files = [ [[package]] name = "chiavdf" -version = "1.1.4" +version = "1.1.6" description = "Chia vdf verification (wraps C++)" optional = false python-versions = ">=3.8" files = [ - {file = "chiavdf-1.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f0256d93be3e4921596576aff22254ab1810c5092c78c5d87f39a229df3fa6ca"}, - {file = "chiavdf-1.1.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:11a36b6f9f7752b0beb6e525cb1ea483cc78c513b0e7066dedfe6feb5eafd44a"}, - {file = "chiavdf-1.1.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:986673742982baa3afb7ff0e9580e23b7b7c60e367eac8c63ffbceb07bf702b1"}, - {file = "chiavdf-1.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:787e7fc55f4b54b1d1b9779b1abecf58142c30c9a19607e4277e4bd4ada5fb4b"}, - {file = "chiavdf-1.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:7732c8731081c132ef14acab134646c91b956267310af822a0ecd03aa884647b"}, - {file = "chiavdf-1.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5301ca8f5f02c7d77732a5b04d0620fef3667246494b8cb7ec95155069855d58"}, - {file = "chiavdf-1.1.4-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:5b98ce04bd15295c79b88c4ba53d21fe76b2412b59e57e7c1e7352755978a084"}, - {file = "chiavdf-1.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29d8f0d5426f6cd1ec90d8634a97e18a468ac9c12674c64d48cdb3872f38e8b1"}, - {file = "chiavdf-1.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8231d3b8eaa384fda651efc20089c5ada1227c19e36212b5ad98cb7dc7c57cb"}, - {file = "chiavdf-1.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:b1fc151af42e09fd2b613fe639375b8baa21dde792a3899aa1f5aa22bf2ab898"}, - {file = "chiavdf-1.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:debe6cca2f6f7963e815ff00a9c9b12a0159b89e1d1962269c3da7ad342af45d"}, - {file = "chiavdf-1.1.4-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:d1575d0b4eb9065d82566c4cae9aaa153fa0ebba7fd21fc7b5407fa3a8ec0819"}, - {file = "chiavdf-1.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2bef1c3173b3ec0c6b34cbc1bbc32dfbb2801e048acccf732b9fb2bb6c0ac70"}, - {file = "chiavdf-1.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55fc997e41e95cd24e54d308f23312d73c2759bddbcf338bd74a359359db6f6f"}, - {file = "chiavdf-1.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:ac8c5ffc4a90992c05ded68a882776bdc2c916794f687f142b755aa7e7bf59cc"}, - {file = "chiavdf-1.1.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b5cf9322da348b48d2dd0546a384a5574ffd37fd10a8c3c6d19ccfc279237fa"}, - {file = "chiavdf-1.1.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:99245e171be8ca34600d7f3286928bb11b53f4d29f7c0211d1767675f514791f"}, - {file = "chiavdf-1.1.4-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a5355c71598d6764a5225a13b45ea73bdfaf586e3765ba91e0b348a2456729bc"}, - {file = "chiavdf-1.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d48bdc419d544e068a71ab76f0d56ca6201fd4dd7269f137b8bb4521d494f12e"}, - {file = "chiavdf-1.1.4-cp38-cp38-win_amd64.whl", hash = 
"sha256:24269407dfb37a674f016babdb410b61c05e0d04383487acdaa78cab8df15d91"}, - {file = "chiavdf-1.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9dcdf1fdfc6d0c223f662098595cd697a674de224ce9d9fa00fc574f68f2868"}, - {file = "chiavdf-1.1.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:90b3e21e30227c71ef012c55b0625cd19852fcfa3a080054779039d0478c1f8a"}, - {file = "chiavdf-1.1.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e62e809294dbed49f4ac61ffda5d4e86a6b585d6cb29710ba6b630f90702de52"}, - {file = "chiavdf-1.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:892d82aae2871ed9e57dd5efa42f53a91c6c12c9d46e45e1d224ae1a9a4e6a78"}, - {file = "chiavdf-1.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:e68c6a5610b5a451ce1d1ea058f34320b35419b53b11da4a9affd8ab4c86e325"}, - {file = "chiavdf-1.1.4.tar.gz", hash = "sha256:17588eff94437a99640e568b7ef4ebcc76596b925f332b47f74f29b50622c98c"}, + {file = "chiavdf-1.1.6-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:0e7c7a0032d14ef11ed12bb6144437d4057d1c2ce435e1da7165659422e8e486"}, + {file = "chiavdf-1.1.6-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:728fe9fa328e134f9b08c46d4e535e6d24e55a0fbbf98c1008a32d63b22e1a3b"}, + {file = "chiavdf-1.1.6-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:34b682795f5c1348cf6b95fb60acf69649a7bd9fac8b890c9cecff8654798f36"}, + {file = "chiavdf-1.1.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89eb391a43ee804bf410a76748d5a725fdb18989e17e9dffec4de5f57413c0f4"}, + {file = "chiavdf-1.1.6-cp310-cp310-win_amd64.whl", hash = "sha256:ca57ceb1e0410bcde5d7b6fdcfa1d9a5b05fb0c6e6d78d6a6cc6df6518eb6e09"}, + {file = "chiavdf-1.1.6-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:a1b109417191071590f36268bd8f7c633b708f023dfe52372756ee3ef9f2466a"}, + {file = "chiavdf-1.1.6-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:c7766e94c84fae64b95e4af16c63a9a44a3e9ba382f896ff268048e40be8f9f6"}, + {file = "chiavdf-1.1.6-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8ac0719cd64b22895121fdbc5a3497ce96ef7e5ba88b0d57c4a6146114a80c11"}, + {file = "chiavdf-1.1.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b3b3fd2631b3e3b795b14d1d6143bad6aa73ca6f8cd67824da551a9a8ba95435"}, + {file = "chiavdf-1.1.6-cp311-cp311-win_amd64.whl", hash = "sha256:c517489d01b7fe775f7230aebea57cfdd2257300b5855c27fb39b5818f912138"}, + {file = "chiavdf-1.1.6-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:3f0c662d45aa99a1121ac4b79588f328bdd88fe9739d06785a5a18454bb16388"}, + {file = "chiavdf-1.1.6-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:523125900b3909aeaeca11e4fe3406316f1f7b00f5323f60035bdece7c27d247"}, + {file = "chiavdf-1.1.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:481457538da7f71e46f7823e1f092a4768cf379e06d2b29e0e2fa167045b5ce6"}, + {file = "chiavdf-1.1.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0a4ec91e53729c966f6fb43c63e3355dc585dd9c00d020176d214f86afa0af87"}, + {file = "chiavdf-1.1.6-cp312-cp312-win_amd64.whl", hash = "sha256:2db5542a7e11af42a03c63709e1e71ac119b25f694cae640e450369deee32003"}, + {file = "chiavdf-1.1.6-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:d30c6ef55d8bbccda0fc96fdca295acb47673fb729287e58691c5da2248ce264"}, + {file = "chiavdf-1.1.6-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:978311d09e07bbd0c807fd8dee8d243a01b8f9b6bebe909b5a33a75a6e6fd244"}, + {file = "chiavdf-1.1.6-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:de4d4e5123724b23259bb3fbc9d89e8e225e129e518b3325446b994624bfd880"}, + {file 
= "chiavdf-1.1.6-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9f32049e97b407bc5e7e2536c91589026272a4c608fb0f22dd4e8e982fa740c8"}, + {file = "chiavdf-1.1.6-cp38-cp38-win_amd64.whl", hash = "sha256:88a752a9f3b4cc7cfec517af0b74eee15581474d6f27c4f21cd468ba1a29878d"}, + {file = "chiavdf-1.1.6-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:9b7f6cd025cc71128f0a467d07eb1ea0b76a074892a50ae76c2094fc8deb93d4"}, + {file = "chiavdf-1.1.6-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:cbdd824114d28e4c0c17ba1e14492b04f440b7cf6697ad582d541b9f7e01e79b"}, + {file = "chiavdf-1.1.6-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:183f8380170ac749d2b479172394118d2536b0a4d02ef56c0e630d22d545e7a3"}, + {file = "chiavdf-1.1.6-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:612518b22da3339d2a1f42711d53d4f0353c21aad1683ad8b86c5ef0e2e49871"}, + {file = "chiavdf-1.1.6-cp39-cp39-win_amd64.whl", hash = "sha256:5cc41e58f751ed156f475905d8d4415e6f8285ce3ee64127496325ea62af20c2"}, + {file = "chiavdf-1.1.6.tar.gz", hash = "sha256:bf32ad4f114db49c9839ff18b7fc704582e162923780751420838830cd92bac6"}, ] [[package]] @@ -3457,4 +3457,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "0d1bba535311a5b058758bcae6a8f720de7c31e9688ed5388b96a1edbb2acb9d" +content-hash = "dace0d72adc211850f2bcf7c41177908240702ee7aedb45683ccd5610ed12e7e" diff --git a/pyproject.toml b/pyproject.toml index 178da24bffe9..44836732f06b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space chia_rs = "0.14.0" -chiavdf = "1.1.4" # timelord and vdf verification +chiavdf = "1.1.6" # timelord and vdf verification click = "8.1.7" # For the CLI clvm = "0.9.10" clvm_tools = "0.4.9" # Currying Program.to other conveniences From 7759619b585d41b227a515696beb9775c4e733a4 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Mon, 30 Sep 2024 09:33:08 -0700 Subject: [PATCH 14/69] [CHIA-1427]: Limit full file creation when processing subscription generations (#18612) * Some logging and some code to limit full file generation based on the max number of full files allowed * Don't write out full files that aren't needed * Black fixes * Adjust the full file during error conditions * No need for try --- .../_tests/core/data_layer/test_data_store.py | 14 +++++- chia/data_layer/data_layer.py | 43 +++++++++++++++---- chia/data_layer/download_data.py | 23 +++++++--- 3 files changed, 63 insertions(+), 17 deletions(-) diff --git a/chia/_tests/core/data_layer/test_data_store.py b/chia/_tests/core/data_layer/test_data_store.py index 5bdd76487503..1c21a6ed9e1d 100644 --- a/chia/_tests/core/data_layer/test_data_store.py +++ b/chia/_tests/core/data_layer/test_data_store.py @@ -1369,6 +1369,7 @@ async def mock_http_download( data_store=data_store, store_id=store_id, existing_generation=3, + target_generation=4, root_hashes=[bytes32.random(seeded_random)], server_info=sinfo, client_foldername=tmp_path, @@ -1392,6 +1393,7 @@ async def mock_http_download( data_store=data_store, store_id=store_id, existing_generation=3, + target_generation=4, root_hashes=[bytes32.random(seeded_random)], server_info=sinfo, client_foldername=tmp_path, @@ -1830,6 +1832,7 @@ async def test_delete_store_data_protects_pending_roots(raw_data_store: DataStor @pytest.mark.anyio @boolean_datacases(name="group_files_by_store", true="group by singleton", 
false="don't group by singleton") +@pytest.mark.parametrize("max_full_files", [1, 2, 5]) async def test_insert_from_delta_file( data_store: DataStore, store_id: bytes32, @@ -1837,6 +1840,7 @@ async def test_insert_from_delta_file( tmp_path: Path, seeded_random: random.Random, group_files_by_store: bool, + max_full_files: int, ) -> None: await data_store.create_tree(store_id=store_id, status=Status.COMMITTED) num_files = 5 @@ -1908,6 +1912,7 @@ async def mock_http_download_2( data_store=data_store, store_id=store_id, existing_generation=0, + target_generation=num_files + 1, root_hashes=root_hashes, server_info=sinfo, client_foldername=tmp_path_1, @@ -1916,6 +1921,7 @@ async def mock_http_download_2( proxy_url="", downloader=None, group_files_by_store=group_files_by_store, + maximum_full_file_count=max_full_files, ) assert not success @@ -1929,6 +1935,7 @@ async def mock_http_download_2( data_store=data_store, store_id=store_id, existing_generation=0, + target_generation=num_files + 1, root_hashes=root_hashes, server_info=sinfo, client_foldername=tmp_path_1, @@ -1937,6 +1944,7 @@ async def mock_http_download_2( proxy_url="", downloader=None, group_files_by_store=group_files_by_store, + maximum_full_file_count=max_full_files, ) assert success @@ -1944,7 +1952,7 @@ async def mock_http_download_2( assert root.generation == num_files + 1 with os.scandir(store_path) as entries: filenames = {entry.name for entry in entries} - assert len(filenames) == 2 * (num_files + 1) + assert len(filenames) == num_files + 1 + max_full_files # 6 deltas and max_full_files full files kv = await data_store.get_keys_values(store_id=store_id) assert kv == kv_before @@ -2032,6 +2040,7 @@ async def test_insert_from_delta_file_correct_file_exists( data_store=data_store, store_id=store_id, existing_generation=0, + target_generation=num_files + 1, root_hashes=root_hashes, server_info=sinfo, client_foldername=tmp_path, @@ -2047,7 +2056,7 @@ async def test_insert_from_delta_file_correct_file_exists( assert root.generation == num_files + 1 with os.scandir(store_path) as entries: filenames = {entry.name for entry in entries} - assert len(filenames) == 2 * (num_files + 1) + assert len(filenames) == num_files + 2 # 1 full and 6 deltas kv = await data_store.get_keys_values(store_id=store_id) assert kv == kv_before @@ -2094,6 +2103,7 @@ async def test_insert_from_delta_file_incorrect_file_exists( data_store=data_store, store_id=store_id, existing_generation=1, + target_generation=6, root_hashes=[incorrect_root_hash], server_info=sinfo, client_foldername=tmp_path, diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py index bcbf7b1871c5..0c0360a0c5ea 100644 --- a/chia/data_layer/data_layer.py +++ b/chia/data_layer/data_layer.py @@ -568,6 +568,7 @@ async def fetch_and_validate(self, store_id: bytes32) -> None: servers_info = await self.data_store.get_available_servers_for_store(store_id, timestamp) # TODO: maybe append a random object to the whole DataLayer class? 
random.shuffle(servers_info) + success = False for server_info in servers_info: url = server_info.url @@ -600,14 +601,16 @@ async def fetch_and_validate(self, store_id: bytes32) -> None: self.data_store, store_id, root.generation, - [record.root for record in reversed(to_download)], - server_info, - self.server_files_location, - self.client_timeout, - self.log, - proxy_url, - await self.get_downloader(store_id, url), - self.group_files_by_store, + target_generation=singleton_record.generation, + root_hashes=[record.root for record in reversed(to_download)], + server_info=server_info, + client_foldername=self.server_files_location, + timeout=self.client_timeout, + log=self.log, + proxy_url=proxy_url, + downloader=await self.get_downloader(store_id, url), + group_files_by_store=self.group_files_by_store, + maximum_full_file_count=self.maximum_full_file_count, ) if success: self.log.info( @@ -621,6 +624,30 @@ async def fetch_and_validate(self, store_id: bytes32) -> None: except Exception as e: self.log.warning(f"Exception while downloading files for {store_id}: {e} {traceback.format_exc()}.") + # if there aren't any servers then don't try to write the full tree + if not success and len(servers_info) > 0: + root = await self.data_store.get_tree_root(store_id=store_id) + if root.node_hash is None: + return + filename_full_tree = get_full_tree_filename_path( + foldername=self.server_files_location, + store_id=store_id, + node_hash=root.node_hash, + generation=root.generation, + group_by_store=self.group_files_by_store, + ) + # Had trouble with this generation, so generate full file for the generation we currently have + if not os.path.exists(filename_full_tree): + with open(filename_full_tree, "wb") as writer: + await self.data_store.write_tree_to_file( + root=root, + node_hash=root.node_hash, + store_id=store_id, + deltas_only=False, + writer=writer, + ) + self.log.info(f"Successfully written full tree filename {filename_full_tree}.") + async def get_downloader(self, store_id: bytes32, url: str) -> Optional[PluginRemote]: request_json = {"store_id": store_id.hex(), "url": url} for d in self.downloaders: diff --git a/chia/data_layer/download_data.py b/chia/data_layer/download_data.py index 1e0e4dae5a1e..331e2cb50b1f 100644 --- a/chia/data_layer/download_data.py +++ b/chia/data_layer/download_data.py @@ -92,7 +92,8 @@ async def insert_into_data_store_from_file( store_id: bytes32, root_hash: Optional[bytes32], filename: Path, -) -> None: +) -> int: + num_inserted = 0 with open(filename, "rb") as reader: while True: chunk = b"" @@ -119,8 +120,10 @@ async def insert_into_data_store_from_file( node_type = NodeType.TERMINAL if serialized_node.is_terminal else NodeType.INTERNAL await data_store.insert_node(node_type, serialized_node.value1, serialized_node.value2) + num_inserted += 1 await data_store.insert_root_with_ancestor_table(store_id=store_id, node_hash=root_hash, status=Status.COMMITTED) + return num_inserted @dataclass @@ -233,6 +236,7 @@ async def insert_from_delta_file( data_store: DataStore, store_id: bytes32, existing_generation: int, + target_generation: int, root_hashes: List[bytes32], server_info: ServerInfo, client_foldername: Path, @@ -241,6 +245,7 @@ async def insert_from_delta_file( proxy_url: str, downloader: Optional[PluginRemote], group_files_by_store: bool = False, + maximum_full_file_count: int = 1, ) -> bool: if group_files_by_store: client_foldername.joinpath(f"{store_id}").mkdir(parents=True, exist_ok=True) @@ -283,7 +288,7 @@ async def insert_from_delta_file( 
existing_generation, group_files_by_store, ) - await insert_into_data_store_from_file( + num_inserted = await insert_into_data_store_from_file( data_store, store_id, None if root_hash == bytes32([0] * 32) else root_hash, @@ -291,13 +296,17 @@ async def insert_from_delta_file( ) log.info( f"Successfully inserted hash {root_hash} from delta file. " - f"Generation: {existing_generation}. Store id: {store_id}." + f"Generation: {existing_generation}. Store id: {store_id}. Nodes inserted: {num_inserted}." ) - root = await data_store.get_tree_root(store_id=store_id) - with open(filename_full_tree, "wb") as writer: - await data_store.write_tree_to_file(root, root_hash, store_id, False, writer) - log.info(f"Successfully written full tree filename {filename_full_tree}.") + if target_generation - existing_generation <= maximum_full_file_count - 1: + root = await data_store.get_tree_root(store_id=store_id) + with open(filename_full_tree, "wb") as writer: + await data_store.write_tree_to_file(root, root_hash, store_id, False, writer) + log.info(f"Successfully written full tree filename {filename_full_tree}.") + else: + log.info(f"Skipping full file generation for {existing_generation}") + await data_store.received_correct_file(store_id, server_info) except Exception: try: From b4137651b6b2107f88b4a83e64b7b2c918c5c3f3 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Mon, 30 Sep 2024 14:02:21 -0400 Subject: [PATCH 15/69] maybe fixup main coverage diff (#18645) maybe fixup `main` coverage diff --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 99b602d5b7c4..44d80caec6c8 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -292,7 +292,7 @@ jobs: - name: Coverage report (diff) if: (github.base_ref != '' || github.event.before != '') && always() env: - compare-branch: ${{ github.base_ref == '' && steps.parent-commit.hash || format('origin/{0}', github.base_ref) }} + compare-branch: ${{ github.base_ref == '' && steps.parent-commit.outputs.hash || format('origin/{0}', github.base_ref) }} run: | set -o pipefail diff-cover --config-file=.diffcover.toml --compare-branch=${{ env.compare-branch }} --fail-under=100 --html-report=coverage-reports/diff-cover.html --markdown-report=coverage-reports/diff-cover.md coverage-reports/coverage.xml | tee coverage-reports/diff-cover-stdout From 9109d8926d15bb956c6a246c4847903791bac7f4 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Tue, 1 Oct 2024 15:25:46 +0100 Subject: [PATCH 16/69] CHIA-1525 Make PreValidationResult take SpendBundleConditions instead of NPCResult (#18647) Make PreValidationResult take SpendBundleConditions instead of NPCResult. 
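For callers the change mostly removes one level of indirection: code that
previously unwrapped the NPCResult now reads the conditions directly. A
minimal before/after sketch, using the accessors exactly as they appear in
the diff below:

    # before this patch
    assert result.npc_result is not None
    assert result.npc_result.conds is not None
    assert result.npc_result.conds.cost > 0

    # after this patch
    assert result.conds is not None
    assert result.conds.cost > 0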
--- chia/_tests/blockchain/test_blockchain.py | 11 +++-- chia/_tests/core/full_node/test_full_node.py | 5 +- chia/consensus/block_body_validation.py | 50 ++++++++------------ chia/consensus/blockchain.py | 9 ++-- chia/consensus/multiprocess_validation.py | 14 +++--- chia/full_node/full_node.py | 8 ++-- 6 files changed, 44 insertions(+), 53 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index fcac27785fd4..a6d65d8fc92b 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -2641,7 +2641,10 @@ async def test_cost_exceeds_max( diff = b.constants.DIFFICULTY_STARTING err = ( await b.add_block( - blocks[-1], PreValidationResult(None, uint64(1), npc_result, True, uint32(0)), None, sub_slot_iters=ssi + blocks[-1], + PreValidationResult(None, uint64(1), npc_result.conds, True, uint32(0)), + None, + sub_slot_iters=ssi, ) )[1] assert err in [Err.BLOCK_COST_EXCEEDS_MAX] @@ -2717,7 +2720,7 @@ async def test_invalid_cost_in_block( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING _, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result, False, uint32(0)), None, sub_slot_iters=ssi + block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi ) assert err == Err.INVALID_BLOCK_COST @@ -2746,7 +2749,7 @@ async def test_invalid_cost_in_block( constants=bt.constants, ) _, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result, False, uint32(0)), None, sub_slot_iters=ssi + block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi ) assert err == Err.INVALID_BLOCK_COST @@ -2777,7 +2780,7 @@ async def test_invalid_cost_in_block( ) result, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result, False, uint32(0)), None, sub_slot_iters=ssi + block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi ) assert err == Err.INVALID_BLOCK_COST diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 0f55b8736f28..594827efbc23 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -741,9 +741,8 @@ async def test_respond_unfinished(self, wallet_nodes, self_hostname): assert entry is not None result = entry.result assert result is not None - assert result.npc_result is not None - assert result.npc_result.conds is not None - assert result.npc_result.conds.cost > 0 + assert result.conds is not None + assert result.conds.cost > 0 assert not full_node_1.full_node.blockchain.contains_block(block.header_hash) assert block.transactions_generator is not None diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 1ef2908877c5..749b6763b850 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -14,7 +14,6 @@ from chia.consensus.blockchain_interface import BlockRecordsProtocol from chia.consensus.coinbase import create_farmer_coin, create_pool_coin from chia.consensus.constants import ConsensusConstants -from chia.consensus.cost_calculator import NPCResult from chia.full_node.mempool_check_conditions import mempool_check_time_locks from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -125,19 +124,22 @@ async def 
validate_block_body( get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]], block: Union[FullBlock, UnfinishedBlock], height: uint32, - npc_result: Optional[NPCResult], + conds: Optional[SpendBundleConditions], fork_info: ForkInfo, bls_cache: Optional[BLSCache], *, validate_signature: bool = True, -) -> Tuple[Optional[Err], Optional[NPCResult]]: +) -> Tuple[Optional[Err], Optional[SpendBundleConditions]]: """ This assumes the header block has been completely validated. - Validates the transactions and body of the block. Returns None for the first value if everything - validates correctly, or an Err if something does not validate. For the second value, returns a CostResult - only if validation succeeded, and there are transactions. In other cases it returns None. The NPC result is - the result of running the generator with the previous generators refs. It is only present for transaction - blocks which have spent coins. + Validates the transactions and body of the block. + Returns None for the first value if everything validates correctly, or an + Err if something does not validate. + For the second value, returns a SpendBundleConditions only if validation + succeeded, and there are transactions. In other cases it returns None. + conds is the result of running the generator with the previous generators + refs. It must be set for transaction blocks and must be None for + non-transaction blocks. fork_info specifies the fork context of this block. In case the block extends the main chain, it can be empty, but if the block extends a fork of the main chain, the fork info is mandatory in order to validate the block. @@ -291,8 +293,7 @@ async def validate_block_body( if block.transactions_generator is not None: # Get List of names removed, puzzles hashes for removed coins and conditions created - assert npc_result is not None - cost = uint64(0 if npc_result.conds is None else npc_result.conds.cost) + cost = uint64(0 if conds is None else conds.cost) # 7. Check that cost <= MAX_BLOCK_COST_CLVM log.debug( @@ -303,19 +304,16 @@ async def validate_block_body( return Err.BLOCK_COST_EXCEEDS_MAX, None # 8. The CLVM program must not return any errors - if npc_result.error is not None: - return Err(npc_result.error), None + assert conds is not None - assert npc_result.conds is not None - - for spend in npc_result.conds.spends: + for spend in conds.spends: removals.append(bytes32(spend.coin_id)) removals_puzzle_dic[bytes32(spend.coin_id)] = bytes32(spend.puzzle_hash) for puzzle_hash, amount, _ in spend.create_coin: c = Coin(bytes32(spend.coin_id), bytes32(puzzle_hash), uint64(amount)) additions.append((c, c.name())) else: - assert npc_result is None + assert conds is None # 9. Check that the correct cost is in the transactions info if block.transactions_info.cost != cost: @@ -459,10 +457,7 @@ async def validate_block_body( # reserve fee cannot be greater than UINT64_MAX per consensus rule. # run_generator() would fail - assert_fee_sum: uint64 = uint64(0) - if npc_result: - assert npc_result.conds is not None - assert_fee_sum = uint64(npc_result.conds.reserve_fee) + assert_fee_sum = uint64(0 if conds is None else conds.reserve_fee) # 17. Check that the assert fee sum <= fees, and that each reserved fee is non-negative if fees < assert_fee_sum: @@ -483,24 +478,21 @@ async def validate_block_body( # 21. 
Verify conditions # verify absolute/relative height/time conditions - if npc_result is not None: - assert npc_result.conds is not None - + if conds is not None: error = mempool_check_time_locks( removal_coin_records, - npc_result.conds, + conds, prev_transaction_block_height, prev_transaction_block_timestamp, ) - if error: + if error is not None: return error, None # create hash_key list for aggsig check pairs_pks: List[G1Element] = [] pairs_msgs: List[bytes] = [] - if npc_result: - assert npc_result.conds is not None - pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) + if conds is not None: + pairs_pks, pairs_msgs = pkm_pairs(conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) # 22. Verify aggregated signature # TODO: move this to pre_validate_blocks_multiprocessing so we can sync faster @@ -520,4 +512,4 @@ async def validate_block_body( if not bls_cache.aggregate_verify(pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature): return Err.BAD_AGGREGATE_SIGNATURE, None - return None, npc_result + return None, conds diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 8f4f5a3b22ea..82ad73687c88 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -338,7 +338,6 @@ async def add_block( if prev_block.height + 1 != block.height: return AddBlockResult.INVALID_BLOCK, Err.INVALID_HEIGHT, None - npc_result: Optional[NPCResult] = pre_validation_result.npc_result required_iters = pre_validation_result.required_iters if pre_validation_result.error is not None: return AddBlockResult.INVALID_BLOCK, Err(pre_validation_result.error), None @@ -412,7 +411,7 @@ async def add_block( # main chain, we still need to re-run it to update the additions and # removals in fork_info. await self.advance_fork_info(block, fork_info) - fork_info.include_spends(None if npc_result is None else npc_result.conds, block, header_hash) + fork_info.include_spends(pre_validation_result.conds, block, header_hash) self.add_block_record(block_rec) return AddBlockResult.ALREADY_HAVE_BLOCK, None, None @@ -431,7 +430,7 @@ async def add_block( self.coin_store.get_coin_records, block, block.height, - npc_result, + pre_validation_result.conds, fork_info, bls_cache, # If we did not already validate the signature, validate it now @@ -444,7 +443,7 @@ async def add_block( # case we're validating blocks on a fork, the next block validation will # need to know of these additions and removals. 
Also, _reconsider_peak() # will need these results - fork_info.include_spends(None if npc_result is None else npc_result.conds, block, header_hash) + fork_info.include_spends(pre_validation_result.conds, block, header_hash) # block_to_block_record() require the previous block in the cache if not genesis and prev_block is not None: @@ -781,7 +780,7 @@ async def validate_unfinished_block( self.coin_store.get_coin_records, block, uint32(prev_height + 1), - npc_result, + None if npc_result is None else npc_result.conds, fork_info, None, validate_signature=False, # Signature was already validated before calling this method, no need to validate diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index dc80c91016bb..76591ceace6c 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -43,7 +43,7 @@ class PreValidationResult(Streamable): error: Optional[uint16] required_iters: Optional[uint64] # Iff error is None - npc_result: Optional[NPCResult] # Iff error is None and block is a transaction block + conds: Optional[SpendBundleConditions] # Iff error is None and block is a transaction block validated_signature: bool timing: uint32 # the time (in milliseconds) it took to pre-validate the block @@ -95,7 +95,7 @@ def batch_pre_validate_blocks( validation_time = time.monotonic() - validation_start results.append( PreValidationResult( - uint16(npc_result.error), None, npc_result, False, uint32(validation_time * 1000) + uint16(npc_result.error), None, npc_result.conds, False, uint32(validation_time * 1000) ) ) continue @@ -144,7 +144,7 @@ def batch_pre_validate_blocks( PreValidationResult( error_int, required_iters, - None if conds is None else NPCResult(None, conds), + conds, successfully_validated_signatures, uint32(validation_time * 1000), ) @@ -164,7 +164,7 @@ async def pre_validate_blocks_multiprocessing( block_records: BlocksProtocol, blocks: Sequence[FullBlock], pool: Executor, - npc_results: Dict[uint32, NPCResult], + block_height_conds_map: Dict[uint32, SpendBundleConditions], *, sub_slot_iters: uint64, difficulty: uint64, @@ -275,10 +275,8 @@ async def pre_validate_blocks_multiprocessing( prev_ses_block = block_rec conditions_pickled = {} - for k, v in npc_results.items(): - assert v.error is None - assert v.conds is not None - conditions_pickled[k] = bytes(v.conds) + for k, v in block_height_conds_map.items(): + conditions_pickled[k] = bytes(v) futures = [] # Pool of workers to validate blocks concurrently recent_blocks_bytes = {bytes(k): bytes(v) for k, v in recent_blocks.items()} # convert to bytes diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index c2c78cd43bdb..b4068fea8956 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -1846,9 +1846,9 @@ async def add_block( return None validation_start = time.monotonic() # Tries to add the block to the blockchain, if we already validated transactions, don't do it again - npc_results = {} - if pre_validation_result is not None and pre_validation_result.npc_result is not None: - npc_results[block.height] = pre_validation_result.npc_result + block_height_conds_map = {} + if pre_validation_result is not None and pre_validation_result.conds is not None: + block_height_conds_map[block.height] = pre_validation_result.conds # Don't validate signatures because we want to validate them in the main thread later, since we have a # cache available @@ -1868,7 +1868,7 @@ async def add_block( self.blockchain, [block], 
self.blockchain.pool, - npc_results, + block_height_conds_map, sub_slot_iters=ssi, difficulty=diff, prev_ses_block=prev_ses_block, From 0ab72914fa071d6be6434f875cfc55a36e538c8a Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Tue, 1 Oct 2024 11:24:26 -0700 Subject: [PATCH 17/69] Update GUI pin to `71bae1c072941217cde26a70e864d9add1de07f9` (#18656) Update GUI pin to --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index fa2a9969f2c7..71bae1c07294 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit fa2a9969f2c78b9f69f8bf9b6fc25c013a505520 +Subproject commit 71bae1c072941217cde26a70e864d9add1de07f9 From 4befaf54bc545f7a78168e1cccc373324a4100e1 Mon Sep 17 00:00:00 2001 From: Geoff Walmsley <5695735+geoffwalmsley@users.noreply.github.com> Date: Thu, 3 Oct 2024 16:26:33 +0100 Subject: [PATCH 18/69] Fix DID balance reporting, and port DID tests to WalletTestFramwork (#18662) * fix double counting in did balances * port did_tests to WalletTestFramework * pylint * update did tests in test_nft_wallet * update vc tests --- chia/_tests/wallet/did_wallet/test_did.py | 1583 ++++++++++++----- .../wallet/nft_wallet/test_nft_wallet.py | 25 + .../_tests/wallet/vc_wallet/test_vc_wallet.py | 8 +- chia/wallet/did_wallet/did_wallet.py | 10 +- chia/wallet/wallet_state_manager.py | 3 +- 5 files changed, 1142 insertions(+), 487 deletions(-) diff --git a/chia/_tests/wallet/did_wallet/test_did.py b/chia/_tests/wallet/did_wallet/test_did.py index 1546dae650f9..b023fd6beffa 100644 --- a/chia/_tests/wallet/did_wallet/test_did.py +++ b/chia/_tests/wallet/did_wallet/test_did.py @@ -2,11 +2,11 @@ import dataclasses import json -from typing import Optional import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element +from chia._tests.conftest import ConsensusMode from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia._tests.util.time_out_assert import time_out_assert @@ -38,6 +38,8 @@ def get_parent_num(did_wallet: DIDWallet): class TestDIDWallet: + # TODO: See Issue CHIA-1544 + # This test should be ported to WalletTestFramework once we can replace keys in the wallet node @pytest.mark.parametrize( "trusted", [True, False], @@ -110,7 +112,7 @@ async def test_creation_from_coin_spend( == json.loads(all_node_1_wallets[1].data)["current_inner"] ) - # TODO: Porting this test to this fixture revealed some balance peculiarities. Fix them. 
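# The "balance peculiarities" noted in the removed TODO trace back to the
# double counting in DID balance reporting that this patch fixes in
# did_wallet.py: a freshly created 101-mojo DID now shows a pending_change
# of 101 with a single pending coin removal, rather than 202 and two, so
# the ported tests below can assert exact WalletStateTransition deltas,
# roughly:
#
#   pre-block:  {"did": {"init": True, "unconfirmed_wallet_balance": 101,
#                        "pending_change": 101, "pending_coin_removal_count": 1}}
#   post-block: {"did": {"confirmed_wallet_balance": 101, "spendable_balance": 101,
#                        "max_send_amount": 101, "unspent_coin_count": 1,
#                        "pending_change": -101, "pending_coin_removal_count": -1}}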
+ @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") @pytest.mark.parametrize( "wallet_environments", [ @@ -163,8 +165,8 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "did": { "init": True, "unconfirmed_wallet_balance": 101, - "pending_change": 202, # TODO: this is not correct, fix this - "pending_coin_removal_count": 2, # TODO: this might not be correct + "pending_change": 101, + "pending_coin_removal_count": 1, }, }, post_block_balance_updates={ @@ -180,8 +182,8 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "spendable_balance": 101, "max_send_amount": 101, "unspent_coin_count": 1, - "pending_change": -202, # TODO: this is not correct, fix this - "pending_coin_removal_count": -2, # TODO: this might not be correct + "pending_change": -101, + "pending_coin_removal_count": -1, }, }, ), @@ -224,8 +226,8 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "did": { "init": True, "unconfirmed_wallet_balance": 201, - "pending_change": 402, # TODO: this is not correct, fix this - "pending_coin_removal_count": 2, # TODO: this might not be correct + "pending_change": 201, + "pending_coin_removal_count": 1, }, }, post_block_balance_updates={ @@ -241,8 +243,8 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "spendable_balance": 201, "max_send_amount": 201, "unspent_coin_count": 1, - "pending_change": -402, # TODO: this is not correct, fix this - "pending_coin_removal_count": -2, # TODO: this might not be correct + "pending_change": -201, + "pending_coin_removal_count": -1, }, }, ), @@ -288,6 +290,7 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "spendable_balance": -101, "pending_change": 101, "pending_coin_removal_count": 1, + "max_send_amount": -101, } }, post_block_balance_updates={ @@ -295,6 +298,7 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "spendable_balance": 101, "pending_change": -101, "pending_coin_removal_count": -1, + "max_send_amount": 101, } }, ), @@ -394,14 +398,13 @@ async def test_creation_from_backup_file(self, wallet_environments: WalletTestFr "did": { "unconfirmed_wallet_balance": -201, "spendable_balance": -201, - # "max_send_amount": -201, # TODO: Uncomment this + "max_send_amount": -201, "pending_coin_removal_count": 1, } }, post_block_balance_updates={ "did": { "confirmed_wallet_balance": -201, - "max_send_amount": -201, # TODO: Delete this when uncommented above "unspent_coin_count": -1, "pending_coin_removal_count": -1, } @@ -421,97 +424,178 @@ async def get_coins_with_ph() -> bool: for wallet in [did_wallet_0, did_wallet_1]: assert wallet.wallet_state_manager.wallets[wallet.id()] == wallet - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.anyio - async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, two_wallet_nodes, trusted): - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - wallet2 = wallet_node_2.wallet_state_manager.main_wallet - - if trusted: - wallet_node.config["trusted_peers"] = 
{ - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) + async def test_did_recovery_with_multiple_backup_dids(self, wallet_environments: WalletTestFramework): + env_0 = wallet_environments.environments[0] + env_1 = wallet_environments.environments[1] + wallet_node_0 = env_0.node + wallet_node_1 = env_1.node + wallet_0 = env_0.xch_wallet + wallet_1 = env_1.xch_wallet - await full_node_api.farm_blocks_to_wallet(1, wallet) - await full_node_api.farm_blocks_to_wallet(1, wallet2) + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } + env_1.wallet_aliases = { + "xch": 1, + "did": 2, + } - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, wallet, uint64(101), action_scope + wallet_node_0.wallet_state_manager, wallet_0, uint64(101), action_scope ) assert did_wallet.get_name() == "Profile 1" - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) + recovery_list = [bytes32.from_hexstr(did_wallet.get_my_DID())] - recovery_list = [bytes.fromhex(did_wallet.get_my_DID())] - - async with wallet2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_2: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node_2.wallet_state_manager, wallet2, uint64(101), action_scope, recovery_list + wallet_node_1.wallet_state_manager, wallet_1, uint64(101), action_scope, recovery_list ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - - await time_out_assert(15, did_wallet_2.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_2.get_unconfirmed_balance, 101) - + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 
1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) assert did_wallet_2.did_info.backup_ids == recovery_list - recovery_list.append(bytes.fromhex(did_wallet_2.get_my_DID())) + recovery_list.append(bytes32.from_hexstr(did_wallet_2.get_my_DID())) - async with wallet2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_3: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node_2.wallet_state_manager, wallet2, uint64(201), action_scope, recovery_list + wallet_node_1.wallet_state_manager, wallet_1, uint64(201), action_scope, recovery_list ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + env_1.wallet_aliases["did_2"] = 3 - assert did_wallet_3.did_info.backup_ids == recovery_list - await time_out_assert(15, did_wallet_3.get_confirmed_balance, 201) - await time_out_assert(15, did_wallet_3.get_unconfirmed_balance, 201) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={}, + post_block_balance_updates={}, + ), + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "set_remainder": True, + }, + "did_2": { + "init": True, + "unconfirmed_wallet_balance": 201, + "pending_change": 201, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "set_remainder": True, + }, + "did_2": { + "confirmed_wallet_balance": 201, + "spendable_balance": 201, + "max_send_amount": 201, + "unspent_coin_count": 1, + "pending_change": -201, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) coin = await did_wallet_3.get_coin() backup_data = did_wallet_3.create_backup() - async with wallet_node.wallet_state_manager.lock: + async with wallet_node_0.wallet_state_manager.lock: did_wallet_4 = await DIDWallet.create_new_did_wallet_from_recovery( - wallet_node.wallet_state_manager, - wallet, + wallet_node_0.wallet_state_manager, + wallet_0, backup_data, ) assert did_wallet_4.get_name() == "Profile 2" + env_0.wallet_aliases["did_2"] = 3 pubkey = ( await did_wallet_4.wallet_state_manager.get_unused_derivation_record(did_wallet_2.wallet_info.id) ).pubkey new_ph = did_wallet_4.did_info.temp_puzhash - async with did_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with did_wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: message_spend_bundle, attest1 = await did_wallet.create_attestment( coin.name(), new_ph, pubkey, action_scope ) - async with did_wallet_2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope_2: + async with did_wallet_2.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope_2: message_spend_bundle2, attest2 = await did_wallet_2.create_attestment( coin.name(), new_ph, pubkey, action_scope_2 ) @@ -524,233 +608,479 @@ async def test_did_recovery_with_multiple_backup_dids(self, self_hostname, 
two_w ) = await did_wallet_4.load_attest_files_for_recovery_spend([attest1, attest2]) assert message_spend_bundle == test_message_spend_bundle - await full_node_api.process_transaction_records( - records=[*action_scope.side_effects.transactions, *action_scope_2.side_effects.transactions] + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "spendable_balance": -101, + "pending_change": 101, + "max_send_amount": -101, + "pending_coin_removal_count": 1, + "set_remainder": True, + }, + "did_2": { + "init": True, + "unconfirmed_wallet_balance": 0, + "pending_change": 0, + "pending_coin_removal_count": 0, + }, + }, + post_block_balance_updates={ + "did": { + "spendable_balance": 101, + "pending_change": -101, + "max_send_amount": 101, + "pending_coin_removal_count": -1, + }, + "did_2": { + "confirmed_wallet_balance": 0, + "spendable_balance": 0, + "max_send_amount": 0, + "unspent_coin_count": 0, + "pending_change": 0, + "pending_coin_removal_count": 0, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "spendable_balance": -101, + "pending_change": 101, + "max_send_amount": -101, + "pending_coin_removal_count": 1, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did": { + "spendable_balance": 101, + "pending_change": -101, + "max_send_amount": 101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] ) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_4.get_confirmed_balance, 0) - await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 0) - async with did_wallet_4.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + + async with did_wallet_4.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet_4.recovery_spend(coin, new_ph, test_info_list, pubkey, message_spend_bundle, action_scope) - await full_node_api.process_transaction_records(action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_4.get_confirmed_balance, 201) - await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 201) - await time_out_assert(15, did_wallet_3.get_confirmed_balance, 0) - await time_out_assert(15, did_wallet_3.get_unconfirmed_balance, 0) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "unconfirmed_wallet_balance": 201, + "pending_change": 0, + "pending_coin_removal_count": 3, + }, + }, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": 201, + "spendable_balance": 201, + "max_send_amount": 201, + "unspent_coin_count": 1, + "pending_change": 0, + "pending_coin_removal_count": -3, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "unconfirmed_wallet_balance": 0, # TODO: fix pre-block balances for recovery + "spendable_balance": 0, + "pending_change": 0, + "max_send_amount": 0, + "pending_coin_removal_count": 0, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": -201, + "spendable_balance": -201, + "max_send_amount": -201, + "unspent_coin_count": -1, + "set_remainder": True, + }, + }, + ), + ] + ) for wallet in [did_wallet, did_wallet_2, did_wallet_3, did_wallet_4]: assert 
wallet.wallet_state_manager.wallets[wallet.id()] == wallet - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio - async def test_did_recovery_with_empty_set(self, self_hostname, two_wallet_nodes, trusted): - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet + async def test_did_recovery_with_empty_set(self, wallet_environments: WalletTestFramework): + env_0 = wallet_environments.environments[0] + wallet_node_0 = env_0.node + wallet_0 = env_0.xch_wallet - ph = await wallet.get_new_puzzlehash() - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } - await full_node_api.farm_blocks_to_wallet(1, wallet) + ph = await wallet_0.get_new_puzzlehash() - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_0.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, wallet, uint64(101), action_scope + wallet_node_0.wallet_state_manager, wallet_0, uint64(101), action_scope ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) - coin = await did_wallet.get_coin() - info = Program.to([]) - pubkey = (await did_wallet.wallet_state_manager.get_unused_derivation_record(did_wallet.wallet_info.id)).pubkey - with pytest.raises(Exception): # We expect a CLVM 80 error for this test - async with did_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=False) as action_scope: - await did_wallet.recovery_spend( - coin, ph, info, pubkey, WalletSpendBundle([], AugSchemeMPL.aggregate([])), action_scope - ) - - @pytest.mark.parametrize( - "trusted", - [True, False], - ) - @pytest.mark.anyio - async def test_did_find_lost_did(self, self_hostname, two_wallet_nodes, trusted): - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - api_0 = WalletRpcApi(wallet_node) - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): 
full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(1, wallet) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + coin = await did_wallet.get_coin() + info = Program.to([]) + pubkey = (await did_wallet.wallet_state_manager.get_unused_derivation_record(did_wallet.wallet_info.id)).pubkey + with pytest.raises(Exception): # We expect a CLVM 80 error for this test + async with did_wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=False + ) as action_scope: + await did_wallet.recovery_spend( + coin, + ph, + info, # type: ignore + pubkey, + WalletSpendBundle([], AugSchemeMPL.aggregate([])), + action_scope, + ) - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, wallet, uint64(101), action_scope + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) + @pytest.mark.anyio + async def test_did_find_lost_did(self, wallet_environments: WalletTestFramework): + env_0 = wallet_environments.environments[0] + wallet_node_0 = env_0.node + wallet_0 = env_0.xch_wallet + api_0 = env_0.rpc_api + + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } + + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: + did_wallet = await DIDWallet.create_new_did_wallet( + wallet_node_0.wallet_state_manager, wallet_0, uint64(101), action_scope ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + # Delete the coin and wallet coin = await did_wallet.get_coin() - await wallet_node.wallet_state_manager.coin_store.delete_coin_record(coin.name()) - await time_out_assert(15, 
did_wallet.get_confirmed_balance, 0) - await wallet_node.wallet_state_manager.user_store.delete_wallet(did_wallet.wallet_info.id) - wallet_node.wallet_state_manager.wallets.pop(did_wallet.wallet_info.id) - assert len(wallet_node.wallet_state_manager.wallets) == 1 + await wallet_node_0.wallet_state_manager.coin_store.delete_coin_record(coin.name()) + await wallet_node_0.wallet_state_manager.user_store.delete_wallet(did_wallet.wallet_info.id) + wallet_node_0.wallet_state_manager.wallets.pop(did_wallet.wallet_info.id) + assert len(wallet_node_0.wallet_state_manager.wallets) == 1 # Find lost DID + assert did_wallet.did_info.origin_coin is not None # mypy resp = await api_0.did_find_lost_did({"coin_id": did_wallet.did_info.origin_coin.name().hex()}) assert resp["success"] did_wallets = list( filter( lambda w: (w.type == WalletType.DECENTRALIZED_ID), - await wallet_node.wallet_state_manager.get_all_wallet_info_entries(), + await wallet_node_0.wallet_state_manager.get_all_wallet_info_entries(), ) ) - did_wallet: Optional[DIDWallet] = wallet_node.wallet_state_manager.wallets[did_wallets[0].id] - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) + did_wallet = wallet_node_0.wallet_state_manager.wallets[did_wallets[0].id] # Spend DID recovery_list = [bytes32.fromhex(did_wallet.get_my_DID())] await did_wallet.update_recovery_list(recovery_list, uint64(1)) assert did_wallet.did_info.backup_ids == recovery_list - async with did_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with did_wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet.create_update_spend(action_scope) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) + env_0.wallet_aliases["did_found"] = 3 + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did_found": { + "init": True, + "confirmed_wallet_balance": 101, + "unconfirmed_wallet_balance": 202, # Seems strange + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": 101, + "pending_coin_removal_count": 1, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did_found": { + "confirmed_wallet_balance": 0, + "unconfirmed_wallet_balance": -101, + "spendable_balance": 0, + "max_send_amount": 0, + "unspent_coin_count": 0, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + # Delete the coin and change inner puzzle coin = await did_wallet.get_coin() - await wallet_node.wallet_state_manager.coin_store.delete_coin_record(coin.name()) - await time_out_assert(15, did_wallet.get_confirmed_balance, 0) + await wallet_node_0.wallet_state_manager.coin_store.delete_coin_record(coin.name()) new_inner_puzzle = await did_wallet.get_new_did_innerpuz() did_wallet.did_info = dataclasses.replace(did_wallet.did_info, current_inner=new_inner_puzzle) # Recovery the coin + assert did_wallet.did_info.origin_coin is not None # mypy resp = await api_0.did_find_lost_did({"coin_id": did_wallet.did_info.origin_coin.name().hex()}) assert resp["success"] - await time_out_assert(15, 
did_wallet.get_confirmed_balance, 101) + found_coin = await did_wallet.get_coin() + assert found_coin == coin assert did_wallet.did_info.current_inner != new_inner_puzzle - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.anyio - async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes, trusted): - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - wallet2 = wallet_node_2.wallet_state_manager.main_wallet - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(1, wallet) - await full_node_api.farm_blocks_to_wallet(1, wallet2) + async def test_did_attest_after_recovery(self, wallet_environments: WalletTestFramework): + env_0 = wallet_environments.environments[0] + env_1 = wallet_environments.environments[1] + wallet_node_0 = env_0.node + wallet_node_1 = env_1.node + wallet_0 = env_0.xch_wallet + wallet_1 = env_1.xch_wallet - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } + env_1.wallet_aliases = { + "xch": 1, + "did": 2, + } + + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, wallet, uint64(101), action_scope + wallet_node_0.wallet_state_manager, wallet_0, uint64(101), action_scope ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + WalletStateTransition(), + ] + ) - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) - recovery_list = [bytes.fromhex(did_wallet.get_my_DID())] + recovery_list = [bytes32.from_hexstr(did_wallet.get_my_DID())] - async with wallet2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with 
wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_2: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node_2.wallet_state_manager, wallet2, uint64(101), action_scope, recovery_list + wallet_node_1.wallet_state_manager, wallet_1, uint64(101), action_scope, recovery_list ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(25, did_wallet_2.get_confirmed_balance, 101) - await time_out_assert(25, did_wallet_2.get_unconfirmed_balance, 101) + await wallet_environments.process_pending_states( + [ + WalletStateTransition(), + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + assert did_wallet_2.did_info.backup_ids == recovery_list # Update coin with new ID info - recovery_list = [bytes.fromhex(did_wallet_2.get_my_DID())] + recovery_list = [bytes32.from_hexstr(did_wallet_2.get_my_DID())] await did_wallet.update_recovery_list(recovery_list, uint64(1)) assert did_wallet.did_info.backup_ids == recovery_list - async with did_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with did_wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet.create_update_spend(action_scope) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "set_remainder": True, + } + }, + post_block_balance_updates={ + "did": { + "set_remainder": True, + } + }, + ), + WalletStateTransition(), + ] + ) # DID Wallet 2 recovers into DID Wallet 3 with new innerpuz backup_data = did_wallet_2.create_backup() - async with wallet_node.wallet_state_manager.lock: + async with wallet_node_0.wallet_state_manager.lock: did_wallet_3 = await DIDWallet.create_new_did_wallet_from_recovery( - wallet_node.wallet_state_manager, - wallet, + wallet_node_0.wallet_state_manager, + wallet_0, backup_data, ) + env_0.wallet_aliases["did_2"] = 3 new_ph = await did_wallet_3.get_new_did_inner_hash() coin = await did_wallet_2.get_coin() pubkey = ( await did_wallet_3.wallet_state_manager.get_unused_derivation_record(did_wallet_3.wallet_info.id) ).pubkey - await time_out_assert(15, did_wallet.get_confirmed_balance, 101) - async with did_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + + async with did_wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: message_spend_bundle, attest_data = await did_wallet.create_attestment( coin.name(), new_ph, pubkey, action_scope ) - 
await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "set_remainder": True, + }, + "did_2": { + "init": True, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did": { + "set_remainder": True, + }, + "did_2": { + "set_remainder": True, + }, + }, + ), + WalletStateTransition(), + ] + ) ( info, @@ -758,33 +1088,94 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes, ) = await did_wallet_3.load_attest_files_for_recovery_spend([attest_data]) async with did_wallet_3.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await did_wallet_3.recovery_spend(coin, new_ph, info, pubkey, message_spend_bundle, action_scope) - await full_node_api.process_transaction_records(action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - - await time_out_assert(15, did_wallet_3.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_3.get_unconfirmed_balance, 101) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "unconfirmed_wallet_balance": 101, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": 101, + "set_remainder": True, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "unconfirmed_wallet_balance": 0, # TODO: fix pre-block balances for recovery + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did": { + "confirmed_wallet_balance": -101, + "set_remainder": True, + }, + }, + ), + ] + ) # DID Wallet 1 recovery spends into DID Wallet 4 backup_data = did_wallet.create_backup() - async with wallet_node_2.wallet_state_manager.lock: + async with wallet_node_1.wallet_state_manager.lock: did_wallet_4 = await DIDWallet.create_new_did_wallet_from_recovery( - wallet_node_2.wallet_state_manager, - wallet2, + wallet_node_1.wallet_state_manager, + wallet_1, backup_data, ) + env_1.wallet_aliases["did_2"] = 3 coin = await did_wallet.get_coin() new_ph = await did_wallet_4.get_new_did_inner_hash() pubkey = ( await did_wallet_4.wallet_state_manager.get_unused_derivation_record(did_wallet_4.wallet_info.id) ).pubkey - async with did_wallet_3.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with did_wallet_3.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: message_spend_bundle, attest1 = await did_wallet_3.create_attestment( coin.name(), new_ph, pubkey, action_scope ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, wallet.get_pending_change_balance, 0) + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "unconfirmed_wallet_balance": 0, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": 0, + "set_remainder": True, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "init": True, + "set_remainder": True, + }, + 
}, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": 0, + "set_remainder": True, + }, + }, + ), + ] + ) + ( test_info_list, test_message_spend_bundle, @@ -794,87 +1185,161 @@ async def test_did_attest_after_recovery(self, self_hostname, two_wallet_nodes, coin, new_ph, test_info_list, pubkey, test_message_spend_bundle, action_scope ) - await full_node_api.process_transaction_records(action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - - await time_out_assert(15, did_wallet_4.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_4.get_unconfirmed_balance, 101) - await time_out_assert(15, did_wallet.get_confirmed_balance, 0) - await time_out_assert(15, did_wallet.get_unconfirmed_balance, 0) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did": { + "confirmed_wallet_balance": -101, + "set_remainder": True, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "did_2": { + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did_2": { + "confirmed_wallet_balance": 101, + "set_remainder": True, + }, + }, + ), + ] + ) for wallet in [did_wallet, did_wallet_3, did_wallet_4]: assert wallet.wallet_state_manager.wallets[wallet.id()] == wallet + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.parametrize( "with_recovery", [True, False], ) - @pytest.mark.parametrize( - "trusted", - [True, False], - ) @pytest.mark.anyio - async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery, trusted): - fee = uint64(1000) - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - wallet2 = wallet_node_2.wallet_state_manager.main_wallet - ph = await wallet.get_new_puzzlehash() - - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} + async def test_did_transfer(self, wallet_environments: WalletTestFramework, with_recovery: bool): + env_0 = wallet_environments.environments[0] + env_1 = wallet_environments.environments[1] + wallet_node_0 = env_0.node + wallet_node_1 = env_1.node + wallet_0 = env_0.xch_wallet + wallet_1 = env_1.xch_wallet - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(1, wallet) + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } + env_1.wallet_aliases = { + "xch": 1, + "did": 2, + } + ph = await wallet_0.get_new_puzzlehash() + fee = uint64(1000) - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as 
action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, - wallet, + wallet_node_0.wallet_state_manager, + wallet_0, uint64(101), action_scope, - [bytes(ph)], + [ph], uint64(1), {"Twitter": "Test", "GitHub": "测试"}, fee=fee, ) assert did_wallet_1.get_name() == "Profile 1" - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101) + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + WalletStateTransition(), + ] + ) + # Transfer DID - new_puzhash = await wallet2.get_new_puzzlehash() - async with did_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + new_puzhash = await wallet_1.get_new_puzzlehash() + async with did_wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet_1.transfer_did(new_puzhash, fee, with_recovery, action_scope) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - # Check if the DID wallet is created in the wallet2 - await time_out_assert(30, get_wallet_num, 2, wallet_node_2.wallet_state_manager) - await time_out_assert(30, get_wallet_num, 1, wallet_node.wallet_state_manager) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "unconfirmed_wallet_balance": -101, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={}, + post_block_balance_updates={ + "did": { + "init": True, + "confirmed_wallet_balance": 101, + "set_remainder": True, + }, + }, + ), + ] + ) + # Get the new DID wallet did_wallets = list( filter( lambda w: (w.type == WalletType.DECENTRALIZED_ID), - await wallet_node_2.wallet_state_manager.get_all_wallet_info_entries(), + await wallet_node_1.wallet_state_manager.get_all_wallet_info_entries(), ) ) - did_wallet_2: Optional[DIDWallet] = wallet_node_2.wallet_state_manager.wallets[did_wallets[0].id] - assert len(wallet_node.wallet_state_manager.wallets) == 1 + did_wallet_2 = wallet_node_1.wallet_state_manager.wallets[did_wallets[0].id] + assert isinstance(did_wallet_2, DIDWallet) # mypy + assert len(wallet_node_0.wallet_state_manager.wallets) == 1 assert did_wallet_1.did_info.origin_coin == did_wallet_2.did_info.origin_coin if with_recovery: assert did_wallet_1.did_info.backup_ids[0] == did_wallet_2.did_info.backup_ids[0] @@ -889,89 +1354,113 @@ async def test_did_transfer(self, self_hostname, two_wallet_nodes, with_recovery new_puzhash, ) - @pytest.mark.parametrize( - 
"trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio - async def test_update_recovery_list(self, self_hostname, two_wallet_nodes, trusted): - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - ph = await wallet.get_new_puzzlehash() + async def test_update_recovery_list(self, wallet_environments: WalletTestFramework): + env = wallet_environments.environments[0] + wallet_node = env.node + wallet = env.xch_wallet - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} + env.wallet_aliases = { + "xch": 1, + "did": 2, + } - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(1, wallet) + ph = await wallet.get_new_puzzlehash() - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_node.wallet_state_manager, wallet, uint64(101), action_scope, [] ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101) - await did_wallet_1.update_recovery_list([bytes(ph)], 1) - async with did_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + await did_wallet_1.update_recovery_list([ph], uint64(1)) + async with did_wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet_1.create_update_spend(action_scope) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101) + + await 
wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "did": { + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "did": { + "set_remainder": True, + }, + }, + ), + ] + ) assert did_wallet_1.did_info.backup_ids[0] == bytes(ph) assert did_wallet_1.did_info.num_of_backup_ids_needed == 1 - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 2, "blocks_needed": [1, 1]}], indirect=True) @pytest.mark.anyio - async def test_get_info(self, self_hostname, two_wallet_nodes, trusted): - fee = uint64(1000) - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - wallet1 = wallet_node_2.wallet_state_manager.main_wallet - ph1 = await wallet1.get_new_puzzlehash() - api_0 = WalletRpcApi(wallet_node) - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} + async def test_get_info(self, wallet_environments: WalletTestFramework): + env_0 = wallet_environments.environments[0] + env_1 = wallet_environments.environments[1] + wallet_node_0 = env_0.node + wallet_0 = env_0.xch_wallet + wallet_1 = env_1.xch_wallet + api_0 = env_0.rpc_api + + env_0.wallet_aliases = { + "xch": 1, + "did": 2, + } + env_1.wallet_aliases = { + "xch": 1, + "did": 2, + } - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(count=2, wallet=wallet) + fee = uint64(1000) did_amount = uint64(101) + ph_1 = await wallet_1.get_new_puzzlehash() - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( - wallet_node.wallet_state_manager, - wallet, + wallet_node_0.wallet_state_manager, + wallet_0, did_amount, action_scope, [], @@ -979,14 +1468,42 @@ async def test_get_info(self, self_hostname, two_wallet_nodes, trusted): fee=fee, ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=15) - - assert await did_wallet_1.get_confirmed_balance() == did_amount - assert await did_wallet_1.get_unconfirmed_balance() == did_amount + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": did_amount, + "pending_change": did_amount, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": did_amount, + "spendable_balance": did_amount, + "max_send_amount": 
did_amount, + "unspent_coin_count": 1, + "pending_change": -did_amount, + "pending_coin_removal_count": -1, + }, + }, + ), + WalletStateTransition(), + ] + ) + assert did_wallet_1.did_info.origin_coin is not None # mypy response = await api_0.did_get_info({"coin_id": did_wallet_1.did_info.origin_coin.name().hex()}) assert response["did_id"] == encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value) assert response["launcher_id"] == did_wallet_1.did_info.origin_coin.name().hex() + assert did_wallet_1.did_info.current_inner is not None # mypy assert response["full_puzzle"].to_program() == create_singleton_puzzle( did_wallet_1.did_info.current_inner, did_wallet_1.did_info.origin_coin.name() ) @@ -997,8 +1514,10 @@ async def test_get_info(self, self_hostname, two_wallet_nodes, trusted): assert decode_puzzle_hash(response["p2_address"]).hex() == response["hints"][0] # Test non-singleton coin - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - coin = (await wallet.select_coins(uint64(1), action_scope)).pop() + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: + coin = (await wallet_0.select_coins(uint64(1), action_scope)).pop() assert coin.amount % 2 == 1 coin_id = coin.name() response = await api_0.did_get_info({"coin_id": coin_id.hex()}) @@ -1006,60 +1525,112 @@ async def test_get_info(self, self_hostname, two_wallet_nodes, trusted): # Test multiple odd coins odd_amount = uint64(1) - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: async with action_scope.use() as interface: interface.side_effects.selected_coins.append(coin) - coin_1 = (await wallet.select_coins(odd_amount, action_scope)).pop() + coin_1 = (await wallet_0.select_coins(odd_amount, action_scope)).pop() assert coin_1.amount % 2 == 0 - async with wallet.wallet_state_manager.new_action_scope( - DEFAULT_TX_CONFIG.override(excluded_coin_ids=[coin_id]), push=True + async with wallet_0.wallet_state_manager.new_action_scope( + wallet_environments.tx_config.override(excluded_coin_ids=[coin_id]), push=True ) as action_scope: - await wallet.generate_signed_transaction(odd_amount, ph1, action_scope, fee) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_2, timeout=15) + await wallet_0.generate_signed_transaction(odd_amount, ph_1, action_scope, fee) + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "unconfirmed_wallet_balance": -odd_amount - fee, + "set_remainder": True, + }, + "did": { + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "xch": { + "confirmed_wallet_balance": -odd_amount - fee, + "set_remainder": True, + }, + "did": { + "set_remainder": True, + }, + }, + ), + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "unconfirmed_wallet_balance": 0, + "set_remainder": True, + } + }, + post_block_balance_updates={ + "xch": { + "confirmed_wallet_balance": odd_amount, + "set_remainder": True, + } + }, + ), + ] + ) - assert await wallet1.get_confirmed_balance() == odd_amount with pytest.raises(ValueError): await api_0.did_get_info({"coin_id": coin_1.name().hex()}) - 
@pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio - async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted): - fee = uint64(1000) - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - api_0 = WalletRpcApi(wallet_node) - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} + async def test_message_spend(self, wallet_environments: WalletTestFramework): + env = wallet_environments.environments[0] + wallet_node = env.node + wallet = env.xch_wallet + api_0 = env.rpc_api - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) + env.wallet_aliases = { + "xch": 1, + "did": 2, + } - await full_node_api.farm_blocks_to_wallet(1, wallet) + fee = uint64(1000) - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_node.wallet_state_manager, wallet, uint64(101), action_scope, [], fee=fee ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) - await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101) + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) response = await api_0.did_message_spend( {"wallet_id": did_wallet_1.wallet_id, "coin_announcements": ["0abc"], "puzzle_announcements": ["0def"]} ) @@ -1073,124 +1644,172 @@ async def test_message_spend(self, self_hostname, two_wallet_nodes, trusted): assert len(conditions[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT]) == 1 assert conditions[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT][0].vars[0].hex() == "0def" - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio - async def test_update_metadata(self, 
self_hostname, two_wallet_nodes, trusted): - fee = uint64(1000) - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} + async def test_update_metadata(self, wallet_environments: WalletTestFramework): + env = wallet_environments.environments[0] + wallet_node = env.node + wallet = env.xch_wallet + + env.wallet_aliases = { + "xch": 1, + "did": 2, + } - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - expected_confirmed_balance = await full_node_api.farm_blocks_to_wallet(count=2, wallet=wallet) + fee = uint64(1000) did_amount = uint64(101) - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_node.wallet_state_manager, wallet, did_amount, action_scope, [], fee=fee ) - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=15) - expected_confirmed_balance -= did_amount + fee - assert await did_wallet_1.get_confirmed_balance() == did_amount - assert await did_wallet_1.get_unconfirmed_balance() == did_amount - assert await wallet.get_confirmed_balance() == expected_confirmed_balance - assert await wallet.get_unconfirmed_balance() == expected_confirmed_balance + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) + + assert did_wallet_1.did_info.current_inner is not None # mypy puzhash = did_wallet_1.did_info.current_inner.get_tree_hash() parent_num = get_parent_num(did_wallet_1) bad_metadata = {"Twitter": {"url": "http://www.twitter.com"}} with pytest.raises(ValueError) as e: - await did_wallet_1.update_metadata(bad_metadata) + await did_wallet_1.update_metadata(bad_metadata) # type: ignore assert e.match("Metadata key value pairs must be strings.") metadata = {} metadata["Twitter"] = "http://www.twitter.com" await did_wallet_1.update_metadata(metadata) - async with did_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with did_wallet_1.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: await did_wallet_1.create_update_spend(action_scope, fee) - await 
full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) - expected_confirmed_balance -= fee - - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=15) - - assert await did_wallet_1.get_confirmed_balance() == did_amount - assert await did_wallet_1.get_unconfirmed_balance() == did_amount + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "unconfirmed_wallet_balance": -fee, + "set_remainder": True, + }, + "did": { + "unconfirmed_wallet_balance": 0, + "set_remainder": True, + }, + }, + post_block_balance_updates={ + "xch": { + "confirmed_wallet_balance": -fee, + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 0, + "set_remainder": True, + }, + }, + ), + ] + ) assert get_parent_num(did_wallet_1) == parent_num + 2 + assert did_wallet_1.did_info.current_inner is not None # mypy assert puzhash != did_wallet_1.did_info.current_inner.get_tree_hash() - assert await wallet.get_confirmed_balance() == expected_confirmed_balance - assert await wallet.get_unconfirmed_balance() == expected_confirmed_balance - assert did_wallet_1.did_info.metadata.find("Twitter") > 0 - @pytest.mark.parametrize( - "trusted", - [True, False], - ) + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") + @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio - async def test_did_sign_message(self, self_hostname, two_wallet_nodes, trusted): + async def test_did_sign_message(self, wallet_environments: WalletTestFramework): + env = wallet_environments.environments[0] + wallet_node = env.node + wallet = env.xch_wallet + api_0 = env.rpc_api + + env.wallet_aliases = { + "xch": 1, + "did": 2, + } fee = uint64(1000) - full_nodes, wallets, _ = two_wallet_nodes - full_node_api = full_nodes[0] - server_1 = full_node_api.server - wallet_node, server_2 = wallets[0] - wallet_node_2, server_3 = wallets[1] - wallet = wallet_node.wallet_state_manager.main_wallet - api_0 = WalletRpcApi(wallet_node) ph = await wallet.get_new_puzzlehash() - if trusted: - wallet_node.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - wallet_node_2.config["trusted_peers"] = { - full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() - } - else: - wallet_node.config["trusted_peers"] = {} - wallet_node_2.config["trusted_peers"] = {} - - await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) - await full_node_api.farm_blocks_to_wallet(1, wallet) - - async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + async with wallet.wallet_state_manager.new_action_scope( + wallet_environments.tx_config, push=True + ) as action_scope: did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_node.wallet_state_manager, wallet, uint64(101), action_scope, - [bytes(ph)], + [ph], uint64(1), {"Twitter": "Test", "GitHub": "测试"}, fee=fee, ) assert did_wallet_1.get_name() == "Profile 1" - await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, 
wallet_node_2]) - await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "init": True, + "unconfirmed_wallet_balance": 101, + "pending_change": 101, + "pending_coin_removal_count": 1, + }, + }, + post_block_balance_updates={ + "xch": { + "set_remainder": True, + }, + "did": { + "confirmed_wallet_balance": 101, + "spendable_balance": 101, + "max_send_amount": 101, + "unspent_coin_count": 1, + "pending_change": -101, + "pending_coin_removal_count": -1, + }, + }, + ), + ] + ) # Test general string + assert did_wallet_1.did_info.origin_coin is not None # mypy message = "Hello World" response = await api_0.sign_message_by_id( { @@ -1213,7 +1832,7 @@ async def test_did_sign_message(self, self_hostname, two_wallet_nodes, trusted): "is_hex": True, } ) - puzzle: Program = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) + puzzle = Program.to((CHIP_0002_SIGN_MESSAGE_PREFIX, bytes.fromhex(message))) assert AugSchemeMPL.verify( G1Element.from_bytes(bytes.fromhex(response["pubkey"])), @@ -1336,6 +1955,8 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_ # == json.loads(all_node_1_wallets[1].data)["current_inner"] # ) + # TODO: See Issue CHIA-1544 + # This test should be ported to WalletTestFramework once we can replace keys in the wallet node @pytest.mark.parametrize( "trusted", [True, False], diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index a3b726c2d33d..5d1bc8464491 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -876,6 +876,7 @@ async def test_nft_with_did_wallet_creation(wallet_environments: WalletTestFrame "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -885,6 +886,7 @@ async def test_nft_with_did_wallet_creation(wallet_environments: WalletTestFrame "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1042,6 +1044,7 @@ async def test_nft_rpc_mint(wallet_environments: WalletTestFramework) -> None: "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -1051,6 +1054,7 @@ async def test_nft_rpc_mint(wallet_environments: WalletTestFramework) -> None: "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1169,6 +1173,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft": {"pending_coin_removal_count": 1}, }, @@ -1184,6 +1189,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1218,6 +1224,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor "unconfirmed_wallet_balance": 
-1, "spendable_balance": -1, "pending_coin_removal_count": 1, + "max_send_amount": -1, } }, post_block_balance_updates={}, # DID wallet is deleted @@ -1313,6 +1320,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft": {"pending_coin_removal_count": 1}, }, @@ -1328,6 +1336,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft": {"pending_coin_removal_count": -1, "unspent_coin_count": -1}, "nft_w_did": {"init": True, "unspent_coin_count": 1}, @@ -1430,6 +1439,7 @@ async def test_update_metadata_for_nft_did(wallet_environments: WalletTestFramew "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft": {"pending_coin_removal_count": 1}, }, @@ -1445,6 +1455,7 @@ async def test_update_metadata_for_nft_did(wallet_environments: WalletTestFramew "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1604,6 +1615,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -1619,6 +1631,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1694,6 +1707,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -1709,6 +1723,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -1761,6 +1776,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 2}, "nft_no_did": {"pending_coin_removal_count": 1}, @@ -1777,6 +1793,7 @@ async def test_nft_bulk_set_did(wallet_environments: WalletTestFramework) -> Non "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -2, "unspent_coin_count": 1}, "nft_no_did": {"pending_coin_removal_count": -1, "unspent_coin_count": -1}, @@ -1900,6 +1917,7 @@ async def test_nft_bulk_transfer(wallet_environments: WalletTestFramework) -> No "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -1915,6 +1933,7 @@ async def test_nft_bulk_transfer(wallet_environments: WalletTestFramework) -> No "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, 
"unspent_coin_count": 1}, }, @@ -1990,6 +2009,7 @@ async def test_nft_bulk_transfer(wallet_environments: WalletTestFramework) -> No "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did": {"pending_coin_removal_count": 1}, }, @@ -2005,6 +2025,7 @@ async def test_nft_bulk_transfer(wallet_environments: WalletTestFramework) -> No "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did": {"pending_coin_removal_count": -1, "unspent_coin_count": 1}, }, @@ -2226,6 +2247,7 @@ async def test_nft_set_did(wallet_environments: WalletTestFramework) -> None: "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_no_did": {"pending_coin_removal_count": 1}, }, @@ -2235,6 +2257,7 @@ async def test_nft_set_did(wallet_environments: WalletTestFramework) -> None: "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_no_did": {"pending_coin_removal_count": -1, "unspent_coin_count": -1}, "nft_w_did1": {"unspent_coin_count": 1}, @@ -2278,6 +2301,7 @@ async def test_nft_set_did(wallet_environments: WalletTestFramework) -> None: "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "nft_w_did1": {"pending_coin_removal_count": 1}, }, @@ -2287,6 +2311,7 @@ async def test_nft_set_did(wallet_environments: WalletTestFramework) -> None: "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "nft_w_did1": {"pending_coin_removal_count": -1, "unspent_coin_count": -1}, "nft_w_did2": {"init": True, "unspent_coin_count": 1}, diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 34ff97f76f82..659bb9998715 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -179,7 +179,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: # One existing coin has been removed and two ephemeral coins have been removed # Does pending_coin_removal_count attempt to show the number of current pending removals # Or does it intend to just mean all pending removals that we should eventually get states for? 
- "pending_coin_removal_count": 5, # 4 for VC mint, 1 for DID mint + "pending_coin_removal_count": 3, "<=#spendable_balance": -1_750_000_000_002, "<=#max_send_amount": -1_750_000_000_002, "set_remainder": True, @@ -200,7 +200,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "xch": { # 1_750_000_000_000 for VC mint fee, 1 for VC singleton, 1 for DID mint "confirmed_wallet_balance": -1_750_000_000_002, - "pending_coin_removal_count": -5, # 3 for VC mint, 1 for DID mint + "pending_coin_removal_count": -3, # 3 for VC mint, 1 for DID mint "set_remainder": True, }, "did": { @@ -241,6 +241,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, "vc": { "pending_coin_removal_count": 1, @@ -256,6 +257,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, "vc": { "pending_coin_removal_count": -1, @@ -600,6 +602,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "spendable_balance": -1, "pending_change": 1, "pending_coin_removal_count": 1, + "max_send_amount": -1, }, }, post_block_balance_updates={ @@ -612,6 +615,7 @@ async def test_vc_lifecycle(wallet_environments: WalletTestFramework) -> None: "spendable_balance": 1, "pending_change": -1, "pending_coin_removal_count": -1, + "max_send_amount": 1, }, }, ), diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py index d785ce4ee401..1f2c63564f59 100644 --- a/chia/wallet/did_wallet/did_wallet.py +++ b/chia/wallet/did_wallet/did_wallet.py @@ -326,7 +326,9 @@ async def get_pending_change_balance(self) -> uint64: continue for coin in record.additions: - if await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id()): + if (await self.wallet_state_manager.does_coin_belong_to_wallet(coin, self.id())) and ( + coin not in record.removals + ): addition_amount += coin.amount return uint64(addition_amount) @@ -1329,8 +1331,10 @@ async def get_spendable_balance(self, unspent_records=None) -> uint128: return spendable_am async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None): - max_send_amount = await self.get_confirmed_balance() - + spendable: List[WalletCoinRecord] = list( + await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records) + ) + max_send_amount = sum(cr.coin.amount for cr in spendable) return max_send_amount async def add_parent(self, name: bytes32, parent: Optional[LineageProof]): diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index e97816e4514d..83dd0a06820c 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -779,7 +779,8 @@ async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> Dict[bytes32, # That is reserved for when the action to actually claw a tx back or forward is initiated. 
                 continue
             for coin in record.removals:
-                removals[coin.name()] = coin
+                if coin not in record.additions:
+                    removals[coin.name()] = coin
         trade_removals: Dict[bytes32, WalletCoinRecord] = await self.trade_manager.get_locked_coins()
         return {**removals, **{coin_id: cr.coin for coin_id, cr in trade_removals.items() if cr.wallet_id == wallet_id}}

From 7c814b17134c7559c482fb29e2e971f754001fc7 Mon Sep 17 00:00:00 2001
From: Amine Khaldi
Date: Thu, 3 Oct 2024 16:26:54 +0100
Subject: [PATCH 19/69] CHIA-1554 Annotate test_taproot.py (#18661)

Annotate test_taproot.py.
---
 chia/_tests/wallet/test_taproot.py | 2 +-
 mypy-exclusions.txt                | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/chia/_tests/wallet/test_taproot.py b/chia/_tests/wallet/test_taproot.py
index ad1fe257a6a6..d85b6bfa64d6 100644
--- a/chia/_tests/wallet/test_taproot.py
+++ b/chia/_tests/wallet/test_taproot.py
@@ -8,7 +8,7 @@
 )


-def test_1():
+def test_1() -> None:
     for main_secret_exponent in range(500, 600):
         hidden_puzzle_hash = DEFAULT_HIDDEN_PUZZLE.get_tree_hash()
         main_pubkey = int_to_public_key(main_secret_exponent)
diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt
index 61de8eac8919..090040cf1778 100644
--- a/mypy-exclusions.txt
+++ b/mypy-exclusions.txt
@@ -93,6 +93,5 @@ chia._tests.util.test_network
 chia._tests.util.time_out_assert
 chia._tests.wallet.did_wallet.test_did
 chia._tests.wallet.rpc.test_wallet_rpc
-chia._tests.wallet.test_taproot
 tools.analyze-chain
 tools.run_block

From 40e0df2c2517692c848f58240460ba53b37b7a04 Mon Sep 17 00:00:00 2001
From: Amine Khaldi
Date: Thu, 3 Oct 2024 16:28:07 +0100
Subject: [PATCH 20/69] CHIA-1472 Augment get_mirror_info to accept both types of programs (#18637)

Augment get_mirror_info to accept both types of programs. This allows us
to use serialized programs without the need to convert them into programs.
---
 chia/data_layer/data_layer_wallet.py       | 4 +---
 chia/wallet/db_wallet/db_wallet_puzzles.py | 9 ++++++---
 2 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py
index 588220dbd511..a6cd126f57d8 100644
--- a/chia/data_layer/data_layer_wallet.py
+++ b/chia/data_layer/data_layer_wallet.py
@@ -808,9 +808,7 @@ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection, c
             )[0]
             parent_spend = await fetch_coin_spend(height, parent_state.coin, peer)
             assert parent_spend is not None
-            launcher_id, urls = get_mirror_info(
-                parent_spend.puzzle_reveal.to_program(), parent_spend.solution.to_program()
-            )
+            launcher_id, urls = get_mirror_info(parent_spend.puzzle_reveal, parent_spend.solution)
             # Don't track mirrors with empty url list.
if not urls: return diff --git a/chia/wallet/db_wallet/db_wallet_puzzles.py b/chia/wallet/db_wallet/db_wallet_puzzles.py index fde613dff1e8..af39088670e3 100644 --- a/chia/wallet/db_wallet/db_wallet_puzzles.py +++ b/chia/wallet/db_wallet/db_wallet_puzzles.py @@ -2,7 +2,7 @@ from typing import Iterator, List, Tuple, Union -from chia.types.blockchain_format.program import Program +from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.serialized_program import SerializedProgram from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.condition_opcodes import ConditionOpcode @@ -94,8 +94,11 @@ def create_mirror_puzzle() -> Program: MIRROR_PUZZLE_HASH = create_mirror_puzzle().get_tree_hash() -def get_mirror_info(parent_puzzle: Program, parent_solution: Program) -> Tuple[bytes32, List[bytes]]: - conditions = parent_puzzle.run(parent_solution) +def get_mirror_info( + parent_puzzle: Union[Program, SerializedProgram], parent_solution: Union[Program, SerializedProgram] +) -> Tuple[bytes32, List[bytes]]: + assert type(parent_puzzle) is type(parent_solution) + _, conditions = parent_puzzle.run_with_cost(INFINITE_COST, parent_solution) for condition in conditions.as_iter(): if ( condition.first().as_python() == ConditionOpcode.CREATE_COIN From 81824c9b344517b7bb47f6a1d2c62a6e063455ab Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Thu, 3 Oct 2024 08:28:58 -0700 Subject: [PATCH 21/69] [CHIA-1308] Port wallet node RPC endpoints to `@marshal` decorator (#18651) * Port `set_wallet_resync_on_startup` * Port `get_sync_status` * Port `get_height_info` * Port `push_tx` * Port `push_transactions` * Delete `farm_block` * Port `get_timestamp_for_height` * Port `get_auto_claim` and set_auto_claim` * pylint * fix test * pylint again * typo Co-authored-by: Kyle Altendorf * clearer comment Co-authored-by: Kyle Altendorf --------- Co-authored-by: Kyle Altendorf --- chia/_tests/cmds/cmd_test_utils.py | 10 +-- chia/_tests/cmds/wallet/test_coins.py | 6 +- chia/_tests/cmds/wallet/test_wallet.py | 6 +- .../wallet/cat_wallet/test_cat_wallet.py | 8 +- chia/_tests/wallet/rpc/test_wallet_rpc.py | 65 ++++++++-------- .../wallet/test_wallet_state_manager.py | 9 ++- chia/cmds/cmds_util.py | 5 +- chia/cmds/coin_funcs.py | 6 +- chia/cmds/plotnft_funcs.py | 4 +- chia/cmds/signer.py | 13 +++- chia/cmds/wallet_funcs.py | 11 ++- chia/rpc/wallet_request_types.py | 70 ++++++++++++++++++ chia/rpc/wallet_rpc_api.py | 74 ++++++++++--------- chia/rpc/wallet_rpc_client.py | 62 ++++++++-------- 14 files changed, 219 insertions(+), 130 deletions(-) diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 28cee7ba6207..5a3032589000 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -19,7 +19,7 @@ from chia.rpc.farmer_rpc_client import FarmerRpcClient from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.rpc.rpc_client import RpcClient -from chia.rpc.wallet_request_types import SendTransactionMultiResponse +from chia.rpc.wallet_request_types import GetSyncStatusResponse, SendTransactionMultiResponse from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.simulator.simulator_full_node_rpc_client import SimulatorFullNodeRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 @@ -82,13 +82,9 @@ class TestWalletRpcClient(TestRpcClient): fingerprint: int = field(init=False, default=0) wallet_index: int = field(init=False, default=0) - async def get_sync_status(self) 
-> bool: + async def get_sync_status(self) -> GetSyncStatusResponse: self.add_to_log("get_sync_status", ()) - return False - - async def get_synced(self) -> bool: - self.add_to_log("get_synced", ()) - return True + return GetSyncStatusResponse(synced=True, syncing=False) async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, int]]]: self.add_to_log("get_wallets", (wallet_type,)) diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 751140808510..a649b82f189a 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -35,7 +35,7 @@ def test_coins_get_info(capsys: object, get_test_cli_clients: Tuple[TestRpcClien run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "get_wallets": [(None,)], - "get_synced": [()], + "get_sync_status": [()], "get_spendable_coins": [ ( 1, @@ -114,7 +114,7 @@ async def combine_coins( ) expected_calls: logType = { "get_wallets": [(None,)], - "get_synced": [()], + "get_sync_status": [()], "combine_coins": [ ( expected_request, @@ -168,7 +168,7 @@ async def split_coins( run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) expected_calls: logType = { "get_wallets": [(None,)], - "get_synced": [()], + "get_sync_status": [()], "split_coins": [ ( SplitCoins( diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index d4cdab8c1f96..76c1f35e287c 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -27,6 +27,7 @@ CancelOfferResponse, CATSpendResponse, CreateOfferForIDsResponse, + GetHeightInfoResponse, SendTransactionResponse, TakeOfferResponse, ) @@ -227,9 +228,9 @@ async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Di return [wallet_list[1]] return wallet_list - async def get_height_info(self) -> uint32: + async def get_height_info(self) -> GetHeightInfoResponse: self.add_to_log("get_height_info", ()) - return uint32(10) + return GetHeightInfoResponse(uint32(10)) async def get_wallet_balance(self, wallet_id: int) -> Dict[str, uint64]: self.add_to_log("get_wallet_balance", (wallet_id,)) @@ -291,7 +292,6 @@ async def get_connections( # these are various things that should be in the output expected_calls: logType = { "get_wallets": [(None,), (WalletType.CAT,)], - "get_synced": [(), ()], "get_sync_status": [(), ()], "get_height_info": [(), ()], "get_wallet_balance": [(1,), (2,), (3,), (2,)], diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index 041b6fc24539..03a3c338f7db 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -9,7 +9,7 @@ from chia._tests.environments.wallet import WalletEnvironment, WalletStateTransition, WalletTestFramework from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_not_none from chia.protocols.wallet_protocol import CoinState -from chia.rpc.wallet_request_types import GetTransactionMemo +from chia.rpc.wallet_request_types import GetTransactionMemo, PushTX from chia.simulator.simulator_protocol import ReorgProtocol from chia.types.blockchain_format.coin import Coin, coin_as_list from chia.types.blockchain_format.program import Program @@ -1555,7 +1555,7 @@ async def test_cat_change_detection(wallet_environments: WalletTestFramework) -> ), ], ) - await env.rpc_client.push_tx(eve_spend) + await 
env.rpc_client.push_tx(PushTX(bytes(eve_spend))) await time_out_assert_not_none(5, full_node_api.full_node.mempool_manager.get_spendbundle, eve_spend.name()) await wallet_environments.process_pending_states( [ @@ -1667,7 +1667,7 @@ async def test_cat_melt_balance(wallet_environments: WalletTestFramework) -> Non ) ], ) - await env.rpc_client.push_tx(spend_to_wallet) + await env.rpc_client.push_tx(PushTX(bytes(spend_to_wallet))) await time_out_assert(10, simulator.tx_id_in_mempool, True, spend_to_wallet.name()) await wallet_environments.process_pending_states( @@ -1717,7 +1717,7 @@ async def test_cat_melt_balance(wallet_environments: WalletTestFramework) -> Non ], ) signed_spend, _ = await env.wallet_state_manager.sign_bundle(new_spend.coin_spends) - await env.rpc_client.push_tx(signed_spend) + await env.rpc_client.push_tx(PushTX(bytes(signed_spend))) await time_out_assert(10, simulator.tx_id_in_mempool, True, signed_spend.name()) await wallet_environments.process_pending_states( diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index 98b829a93008..98277d3669d2 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -55,7 +55,10 @@ DIDGetPubkey, GetNotifications, GetPrivateKey, + GetTimestampForHeight, LogIn, + PushTransactions, + SetWalletResyncOnStartup, SplitCoins, SplitCoinsResponse, VerifySignature, @@ -139,6 +142,10 @@ class WalletRpcTestEnvironment: full_node: FullNodeBundle +async def check_client_synced(wallet_client: WalletRpcClient) -> bool: + return (await wallet_client.get_sync_status()).synced + + async def farm_transaction_block(full_node_api: FullNodeSimulator, wallet_node: WalletNode): await full_node_api.farm_blocks_to_puzzlehash(count=1) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) @@ -174,7 +181,7 @@ async def generate_funds(full_node_api: FullNodeSimulator, wallet_bundle: Wallet expected_unconfirmed = initial_balances["unconfirmed_wallet_balance"] + generated_funds await time_out_assert(20, get_confirmed_balance, expected_confirmed, wallet_bundle.rpc_client, wallet_id) await time_out_assert(20, get_unconfirmed_balance, expected_unconfirmed, wallet_bundle.rpc_client, wallet_id) - await time_out_assert(20, wallet_bundle.rpc_client.get_synced) + await time_out_assert(20, check_client_synced, True, wallet_bundle.rpc_client) return generated_funds @@ -405,29 +412,25 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen ) ).signed_tx - resp_client = await client.push_transactions([tx], fee=uint64(10)) + resp_client = await client.push_transactions( + PushTransactions(transactions=[tx], fee=uint64(10)), # pylint: disable=unexpected-keyword-arg + DEFAULT_TX_CONFIG, + ) resp = await client.fetch( "push_transactions", {"transactions": [tx.to_json_dict_convenience(wallet_node.config)], "fee": 10} ) assert resp["success"] - resp = await client.fetch("push_transactions", {"transactions": [tx.to_json_dict()], "fee": 10}) + resp = await client.fetch("push_transactions", {"transactions": [bytes(tx).hex()], "fee": 10}) assert resp["success"] spend_bundle = WalletSpendBundle.aggregate( - [ - # We ARE checking that the spend bundle is not None but mypy can't recognize this - TransactionRecord.from_json_dict_convenience(tx).spend_bundle # type: ignore[type-var] - for tx in resp_client["transactions"] - if tx["spend_bundle"] is not None - ] + [tx.spend_bundle for tx in resp_client.transactions if tx.spend_bundle is not None] ) 
assert spend_bundle is not None await farm_transaction(full_node_api, wallet_node, spend_bundle) - for tx_json in resp_client["transactions"]: - tx = TransactionRecord.from_json_dict_convenience(tx_json) - tx = await client.get_transaction(transaction_id=tx.name) - assert tx.confirmed + for tx in resp_client.transactions: + assert (await client.get_transaction(transaction_id=tx.name)).confirmed @pytest.mark.anyio @@ -461,7 +464,9 @@ async def test_get_farmed_amount(wallet_rpc_environment: WalletRpcTestEnvironmen await full_node_api.farm_blocks_to_wallet(2, wallet) get_farmed_amount_result = await wallet_rpc_client.get_farmed_amount() - get_timestamp_for_height_result = await wallet_rpc_client.get_timestamp_for_height(uint32(3)) # genesis + 2 + get_timestamp_for_height_result = await wallet_rpc_client.get_timestamp_for_height( + GetTimestampForHeight(uint32(3)) + ) # genesis + 2 expected_result = { "blocks_won": 2, @@ -469,7 +474,7 @@ async def test_get_farmed_amount(wallet_rpc_environment: WalletRpcTestEnvironmen "farmer_reward_amount": 500_000_000_000, "fee_amount": 0, "last_height_farmed": 3, - "last_time_farmed": get_timestamp_for_height_result, + "last_time_farmed": get_timestamp_for_height_result.timestamp, "pool_reward_amount": 3_500_000_000_000, "success": True, } @@ -513,8 +518,8 @@ async def test_get_timestamp_for_height(wallet_rpc_environment: WalletRpcTestEnv await generate_funds(full_node_api, env.wallet_1) - # This tests that the client returns a uint64, rather than raising or returning something unexpected - uint64(await client.get_timestamp_for_height(uint32(1))) + # This tests that the client returns successfully, rather than raising or returning something unexpected + await client.get_timestamp_for_height(GetTimestampForHeight(uint32(1))) @pytest.mark.parametrize( @@ -1044,7 +1049,7 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): # Creates a CAT wallet with 100 mojos and a CAT with 20 mojos and fee=10 await client.create_new_cat_and_wallet(uint64(100), fee=uint64(10), test=True) - await time_out_assert(20, client.get_synced) + await time_out_assert(20, check_client_synced, True, client) res = await client.create_new_cat_and_wallet(uint64(20), test=True) assert res["success"] @@ -1748,7 +1753,7 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn await generate_funds(env.full_node.api, env.wallet_1) - assert (await client.get_height_info()) > 0 + assert (await client.get_height_info()).height > 0 ph = await wallet.get_new_puzzlehash() addr = encode_puzzle_hash(ph, "txch") @@ -1810,7 +1815,7 @@ async def test_key_and_address_endpoints(wallet_rpc_environment: WalletRpcTestEn await client.log_in(LogIn(uint32(pks[1]))) assert len((await client.get_public_keys()).pk_fingerprints) == 1 - assert not (await client.get_sync_status()) + assert not (await client.get_sync_status()).synced wallets = await client.get_wallets() assert len(wallets) == 1 @@ -2386,7 +2391,7 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes await wc.create_new_did_wallet(1, DEFAULT_TX_CONFIG, 0) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) await farm_transaction_block(full_node_api, env.wallet_1.node) - await time_out_assert(20, wc.get_synced) + await time_out_assert(20, check_client_synced, True, wc) nft_wallet = await wc.create_new_nft_wallet(None) nft_wallet_id = nft_wallet["wallet_id"] @@ -2401,7 +2406,7 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: 
WalletRpcTes ) await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 1) await farm_transaction_block(full_node_api, env.wallet_1.node) - await time_out_assert(20, wc.get_synced) + await time_out_assert(20, check_client_synced, True, wc) wallet_node: WalletNode = env.wallet_1.node wallet_node_2: WalletNode = env.wallet_2.node @@ -2419,7 +2424,7 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes clawback_coin_id = tx.additions[0].name() assert tx.spend_bundle is not None await farm_transaction(full_node_api, wallet_node, tx.spend_bundle) - await time_out_assert(20, wc.get_synced) + await time_out_assert(20, check_client_synced, True, wc) await asyncio.sleep(10) resp = await wc.spend_clawback_coins([clawback_coin_id], 0) assert resp["success"] @@ -2428,11 +2433,11 @@ async def test_set_wallet_resync_on_startup(wallet_rpc_environment: WalletRpcTes 10, full_node_api.full_node.mempool_manager.get_spendbundle, bytes32.from_hexstr(resp["transaction_ids"][0]) ) await farm_transaction_block(full_node_api, wallet_node) - await time_out_assert(20, wc.get_synced) + await time_out_assert(20, check_client_synced, True, wc) wallet_node_2._close() await wallet_node_2._await_closed() # set flag to reset wallet sync data on start - await client.set_wallet_resync_on_startup() + await client.set_wallet_resync_on_startup(SetWalletResyncOnStartup()) fingerprint = wallet_node.logged_in_fingerprint assert wallet_node._wallet_state_manager # 2 reward coins, 1 DID, 1 NFT, 1 clawbacked coin @@ -2484,12 +2489,12 @@ async def test_set_wallet_resync_on_startup_disable(wallet_rpc_environment: Wall wallet_node_2._close() await wallet_node_2._await_closed() # set flag to reset wallet sync data on start - await client.set_wallet_resync_on_startup() + await client.set_wallet_resync_on_startup(SetWalletResyncOnStartup()) fingerprint = wallet_node.logged_in_fingerprint assert wallet_node._wallet_state_manager assert len(await wallet_node._wallet_state_manager.coin_store.get_all_unspent_coins()) == 2 before_txs = await wallet_node.wallet_state_manager.tx_store.get_all_transactions() - await client.set_wallet_resync_on_startup(False) + await client.set_wallet_resync_on_startup(SetWalletResyncOnStartup(False)) wallet_node._close() await wallet_node._await_closed() config = load_config(wallet_node.root_path, "config.yaml") @@ -2625,16 +2630,16 @@ async def test_get_balances(wallet_rpc_environment: WalletRpcTestEnvironment): await generate_funds(full_node_api, env.wallet_1, 1) - await time_out_assert(20, client.get_synced) + await time_out_assert(20, check_client_synced, True, client) # Creates a CAT wallet with 100 mojos and a CAT with 20 mojos await client.create_new_cat_and_wallet(uint64(100), test=True) - await time_out_assert(20, client.get_synced) + await time_out_assert(20, check_client_synced, True, client) res = await client.create_new_cat_and_wallet(uint64(20), test=True) assert res["success"] await time_out_assert(5, check_mempool_spend_count, True, full_node_api, 2) await farm_transaction_block(full_node_api, wallet_node) - await time_out_assert(20, client.get_synced) + await time_out_assert(20, check_client_synced, True, client) bal = await client.get_wallet_balances() assert len(bal) == 3 assert bal["1"]["confirmed_wallet_balance"] == 1999999999880 diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 8dcafe07a78d..5b35023e41ed 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ 
b/chia/_tests/wallet/test_wallet_state_manager.py @@ -9,6 +9,7 @@ from chia._tests.environments.wallet import WalletStateTransition, WalletTestFramework from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia.protocols.wallet_protocol import CoinState +from chia.rpc.wallet_request_types import PushTransactions from chia.server.outbound_message import NodeType from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -224,7 +225,13 @@ async def test_confirming_txs_not_ours(wallet_environments: WalletTestFramework) action_scope, ) - await env_2.rpc_client.push_transactions(action_scope.side_effects.transactions) + await env_2.rpc_client.push_transactions( + PushTransactions( # pylint: disable=unexpected-keyword-arg + transactions=action_scope.side_effects.transactions, + sign=False, + ), + wallet_environments.tx_config, + ) await wallet_environments.process_pending_states( [ diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index 15d2c09d4b80..a5f9b9dde668 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -177,9 +177,10 @@ async def get_wallet(root_path: Path, wallet_client: WalletRpcClient, fingerprin current_sync_status: str = "" indent = " " if logged_in_key is not None: - if await wallet_client.get_synced(): + sync_response = await wallet_client.get_sync_status() + if sync_response.synced: current_sync_status = "Synced" - elif await wallet_client.get_sync_status(): + elif sync_response.syncing: current_sync_status = "Syncing" else: current_sync_status = "Not Synced" diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index 35b321326a23..c768c246e225 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -40,7 +40,7 @@ async def async_list( except LookupError: print(f"Wallet id: {wallet_id} not found.") return - if not await wallet_client.get_synced(): + if not (await wallet_client.get_sync_status()).synced: print("Wallet not synced. Please wait.") return conf_coins, unconfirmed_removals, unconfirmed_additions = await wallet_client.get_spendable_coins( @@ -136,7 +136,7 @@ async def async_combine( except LookupError: print(f"Wallet id: {wallet_id} not found.") return [] - if not await wallet_client.get_synced(): + if not (await wallet_client.get_sync_status()).synced: print("Wallet not synced. Please wait.") return [] @@ -206,7 +206,7 @@ async def async_split( except LookupError: print(f"Wallet id: {wallet_id} not found.") return [] - if not await wallet_client.get_synced(): + if not (await wallet_client.get_sync_status()).synced: print("Wallet not synced. 
Please wait.") return [] diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py index d0663ee0840c..2de6b9f18051 100644 --- a/chia/cmds/plotnft_funcs.py +++ b/chia/cmds/plotnft_funcs.py @@ -181,8 +181,8 @@ async def pprint_all_pool_wallet_state( address_prefix: str, pool_state_dict: Dict[bytes32, Dict[str, Any]], ) -> None: - print(f"Wallet height: {await wallet_client.get_height_info()}") - print(f"Sync status: {'Synced' if (await wallet_client.get_synced()) else 'Not synced'}") + print(f"Wallet height: {(await wallet_client.get_height_info()).height}") + print(f"Sync status: {'Synced' if (await wallet_client.get_sync_status()).synced else 'Not synced'}") for wallet_info in get_wallets_response: pool_wallet_id = wallet_info["id"] typ = WalletType(int(wallet_info["type"])) diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py index 317191d78062..e710cd02547c 100644 --- a/chia/cmds/signer.py +++ b/chia/cmds/signer.py @@ -18,11 +18,17 @@ from chia.cmds.cmds_util import TransactionBundle from chia.cmds.wallet import wallet_cmd from chia.rpc.util import ALL_TRANSLATION_LAYERS -from chia.rpc.wallet_request_types import ApplySignatures, ExecuteSigningInstructions, GatherSigningInfo +from chia.rpc.wallet_request_types import ( + ApplySignatures, + ExecuteSigningInstructions, + GatherSigningInfo, + PushTransactions, +) from chia.util.streamable import Streamable from chia.wallet.signer_protocol import SignedTransaction, SigningInstructions, SigningResponse, Spend from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.clvm_streamable import byte_deserialize_clvm_streamable, byte_serialize_clvm_streamable +from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.wallet_spend_bundle import WalletSpendBundle @@ -292,7 +298,10 @@ class PushTransactionsCMD: async def run(self) -> None: async with self.rpc_info.wallet_rpc() as wallet_rpc: - await wallet_rpc.client.push_transactions(self.txs_in.transaction_bundle.txs) + # TODO: provide access to additional parameters instead of filling with the defaults constant + await wallet_rpc.client.push_transactions( + PushTransactions(transactions=self.txs_in.transaction_bundle.txs), DEFAULT_TX_CONFIG + ) # Uncomment this for testing of qr code display diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 01b7daa29a3c..3215a3f1c301 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -858,18 +858,17 @@ async def print_balances( summaries_response = await wallet_client.get_wallets(wallet_type) address_prefix = selected_network_address_prefix(config) - is_synced: bool = await wallet_client.get_synced() - is_syncing: bool = await wallet_client.get_sync_status() + sync_response = await wallet_client.get_sync_status() - print(f"Wallet height: {await wallet_client.get_height_info()}") - if is_syncing: + print(f"Wallet height: {(await wallet_client.get_height_info()).height}") + if sync_response.syncing: print("Sync status: Syncing...") - elif is_synced: + elif sync_response.synced: print("Sync status: Synced") else: print("Sync status: Not synced") - if not is_syncing and is_synced: + if not sync_response.syncing and sync_response.synced: if len(summaries_response) == 0: type_hint = " " if wallet_type is None else f" from type {wallet_type.name} " print(f"\nNo wallets{type_hint}available for fingerprint: {fingerprint}") diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 701a0f984d5c..b2c264bf4f3e 100644 --- 
a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -10,6 +10,7 @@ from typing_extensions import dataclass_transform from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint16, uint32, uint64 from chia.util.streamable import Streamable, streamable from chia.wallet.conditions import Condition, ConditionValidTimes @@ -147,6 +148,44 @@ class CheckDeleteKeyResponse(Streamable): wallet_balance: bool +@streamable +@dataclass(frozen=True) +class SetWalletResyncOnStartup(Streamable): + enable: bool = True + + +@streamable +@dataclass(frozen=True) +class GetSyncStatusResponse(Streamable): + synced: bool + syncing: bool + genesis_initialized: bool = True + + +@streamable +@dataclass(frozen=True) +class GetHeightInfoResponse(Streamable): + height: uint32 + + +@streamable +@dataclass(frozen=True) +class PushTX(Streamable): + spend_bundle: bytes + + +@streamable +@dataclass(frozen=True) +class GetTimestampForHeight(Streamable): + height: uint32 + + +@streamable +@dataclass(frozen=True) +class GetTimestampForHeightResponse(Streamable): + timestamp: uint64 + + @streamable @dataclass(frozen=True) class GetNotifications(Streamable): @@ -385,6 +424,7 @@ class ExecuteSigningInstructionsResponse(Streamable): class TransactionEndpointRequest(Streamable): fee: uint64 = uint64(0) push: Optional[bool] = None + sign: Optional[bool] = None def to_json_dict(self, _avoid_ban: bool = False) -> Dict[str, Any]: if not _avoid_ban: @@ -412,6 +452,36 @@ class TransactionEndpointResponse(Streamable): transactions: List[TransactionRecord] +@streamable +@dataclass(frozen=True) +class PushTransactions(TransactionEndpointRequest): + transactions: List[TransactionRecord] = field(default_factory=default_raise) + push: Optional[bool] = True + + # We allow for flexibility in transaction parsing here so we need to override + @classmethod + def from_json_dict(cls, json_dict: Dict[str, Any]) -> PushTransactions: + transactions: List[TransactionRecord] = [] + for transaction_hexstr_or_json in json_dict["transactions"]: + if isinstance(transaction_hexstr_or_json, str): + tx = TransactionRecord.from_bytes(hexstr_to_bytes(transaction_hexstr_or_json)) + else: + try: + tx = TransactionRecord.from_json_dict_convenience(transaction_hexstr_or_json) + except AttributeError: + tx = TransactionRecord.from_json_dict(transaction_hexstr_or_json) + transactions.append(tx) + + json_dict["transactions"] = [tx.to_json_dict() for tx in transactions] + return super().from_json_dict(json_dict) + + +@streamable +@dataclass(frozen=True) +class PushTransactionsResponse(TransactionEndpointResponse): + pass + + @streamable @kw_only_dataclass class SplitCoins(TransactionEndpointRequest): diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 7bb2f2749481..8328425b11f3 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -35,6 +35,7 @@ GatherSigningInfo, GatherSigningInfoResponse, GenerateMnemonicResponse, + GetHeightInfoResponse, GetLoggedInFingerprintResponse, GetNotifications, GetNotificationsResponse, @@ -42,8 +43,15 @@ GetPrivateKeyFormat, GetPrivateKeyResponse, GetPublicKeysResponse, + GetSyncStatusResponse, + GetTimestampForHeight, + GetTimestampForHeightResponse, LogIn, LogInResponse, + PushTransactions, + PushTransactionsResponse, + PushTX, + SetWalletResyncOnStartup, SplitCoins, SplitCoinsResponse, SubmitTransactions, @@ -594,64 +602,55 @@ async def delete_all_keys(self, request: Empty) -> Empty: 
########################################################################################## # Wallet Node ########################################################################################## - async def set_wallet_resync_on_startup(self, request: Dict[str, Any]) -> Dict[str, Any]: + @marshal + async def set_wallet_resync_on_startup(self, request: SetWalletResyncOnStartup) -> Empty: """ Resync the current logged in wallet. The transaction and offer records will be kept. :param request: optionally pass in `enable` as bool to enable/disable resync :return: """ assert self.service.wallet_state_manager is not None - try: - enable = bool(request.get("enable", True)) - except ValueError: - raise ValueError("Please provide a boolean value for `enable` parameter in request") fingerprint = self.service.logged_in_fingerprint if fingerprint is not None: - self.service.set_resync_on_startup(fingerprint, enable) + self.service.set_resync_on_startup(fingerprint, request.enable) else: raise ValueError("You need to login into wallet to use this RPC call") - return {"success": True} + return Empty() - async def get_sync_status(self, request: Dict[str, Any]) -> EndpointResult: + @marshal + async def get_sync_status(self, request: Empty) -> GetSyncStatusResponse: sync_mode = self.service.wallet_state_manager.sync_mode has_pending_queue_items = self.service.new_peak_queue.has_pending_data_process_items() syncing = sync_mode or has_pending_queue_items synced = await self.service.wallet_state_manager.synced() - return {"synced": synced, "syncing": syncing, "genesis_initialized": True} + return GetSyncStatusResponse(synced=synced, syncing=syncing) - async def get_height_info(self, request: Dict[str, Any]) -> EndpointResult: + @marshal + async def get_height_info(self, request: Empty) -> GetHeightInfoResponse: height = await self.service.wallet_state_manager.blockchain.get_finished_sync_up_to() - return {"height": height} + return GetHeightInfoResponse(height=height) - async def push_tx(self, request: Dict[str, Any]) -> EndpointResult: + @marshal + async def push_tx(self, request: PushTX) -> Empty: nodes = self.service.server.get_connections(NodeType.FULL_NODE) if len(nodes) == 0: raise ValueError("Wallet is not currently connected to any full node peers") - await self.service.push_tx(WalletSpendBundle.from_bytes(hexstr_to_bytes(request["spend_bundle"]))) - return {} + await self.service.push_tx(WalletSpendBundle.from_bytes(request.spend_bundle)) + return Empty() @tx_endpoint(push=True) + @marshal async def push_transactions( self, - request: Dict[str, Any], + request: PushTransactions, action_scope: WalletActionScope, extra_conditions: Tuple[Condition, ...] 
= tuple(), - ) -> EndpointResult: + ) -> PushTransactionsResponse: if not action_scope.config.push: raise ValueError("Cannot push transactions if push is False") async with action_scope.use() as interface: - for transaction_hexstr_or_json in request["transactions"]: - if isinstance(transaction_hexstr_or_json, str): - tx = TransactionRecord.from_bytes(hexstr_to_bytes(transaction_hexstr_or_json)) - interface.side_effects.transactions.append(tx) - else: - try: - tx = TransactionRecord.from_json_dict_convenience(transaction_hexstr_or_json) - except AttributeError: - tx = TransactionRecord.from_json_dict(transaction_hexstr_or_json) - interface.side_effects.transactions.append(tx) - - if request.get("fee", 0) != 0: + interface.side_effects.transactions.extend(request.transactions) + if request.fee != 0: all_conditions_and_origins = [ (condition, cs.coin.name()) for tx in interface.side_effects.transactions @@ -682,7 +681,7 @@ async def push_transactions( push=False, ) as inner_action_scope: await self.service.wallet_state_manager.main_wallet.create_tandem_xch_tx( - uint64(request["fee"]), + request.fee, inner_action_scope, ( *extra_conditions, @@ -694,20 +693,23 @@ async def push_transactions( interface.side_effects.transactions.extend(inner_action_scope.side_effects.transactions) - return {} + return PushTransactionsResponse([], []) # tx_endpoint takes care of this - async def get_timestamp_for_height(self, request: Dict[str, Any]) -> EndpointResult: - return {"timestamp": await self.service.get_timestamp_for_height(uint32(request["height"]))} + @marshal + async def get_timestamp_for_height(self, request: GetTimestampForHeight) -> GetTimestampForHeightResponse: + return GetTimestampForHeightResponse(await self.service.get_timestamp_for_height(request.height)) - async def set_auto_claim(self, request: Dict[str, Any]) -> EndpointResult: + @marshal + async def set_auto_claim(self, request: AutoClaimSettings) -> AutoClaimSettings: """ Set auto claim merkle coins config :param request: Example {"enable": true, "tx_fee": 100000, "min_amount": 0, "batch_size": 50} :return: """ - return self.service.set_auto_claim(AutoClaimSettings.from_json_dict(request)) + return AutoClaimSettings.from_json_dict(self.service.set_auto_claim(request)) - async def get_auto_claim(self, request: Dict[str, Any]) -> EndpointResult: + @marshal + async def get_auto_claim(self, request: Empty) -> AutoClaimSettings: """ Get auto claim merkle coins config :param request: None @@ -716,7 +718,7 @@ async def get_auto_claim(self, request: Dict[str, Any]) -> EndpointResult: auto_claim_settings = AutoClaimSettings.from_json_dict( self.service.wallet_state_manager.config.get("auto_claim", {}) ) - return auto_claim_settings.to_json_dict() + return auto_claim_settings ########################################################################################## # Wallet Management diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 28347fc7bbf0..2cd2022bbd55 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -45,6 +45,7 @@ GatherSigningInfoResponse, GenerateMnemonicResponse, GetCATListResponse, + GetHeightInfoResponse, GetLoggedInFingerprintResponse, GetNotifications, GetNotificationsResponse, @@ -52,6 +53,9 @@ GetPrivateKey, GetPrivateKeyResponse, GetPublicKeysResponse, + GetSyncStatusResponse, + GetTimestampForHeight, + GetTimestampForHeightResponse, GetTransactionMemo, GetTransactionMemoResponse, LogIn, @@ -69,8 +73,12 @@ NFTTransferBulk, NFTTransferBulkResponse, 
NFTTransferNFTResponse, + PushTransactions, + PushTransactionsResponse, + PushTX, SendTransactionMultiResponse, SendTransactionResponse, + SetWalletResyncOnStartup, SplitCoins, SplitCoinsResponse, SubmitTransactions, @@ -100,7 +108,6 @@ from chia.wallet.util.wallet_types import WalletType from chia.wallet.vc_wallet.vc_store import VCRecord from chia.wallet.wallet_coin_store import GetCoinRecords -from chia.wallet.wallet_spend_bundle import WalletSpendBundle def parse_result_transactions(result: Dict[str, Any]) -> Dict[str, Any]: @@ -124,9 +131,6 @@ class WalletRpcClient(RpcClient): async def log_in(self, request: LogIn) -> LogInResponse: return LogInResponse.from_json_dict(await self.fetch("log_in", request.to_json_dict())) - async def set_wallet_resync_on_startup(self, enable: bool = True) -> Dict[str, Any]: - return await self.fetch(path="set_wallet_resync_on_startup", request_json={"enable": enable}) - async def get_logged_in_fingerprint(self) -> GetLoggedInFingerprintResponse: return GetLoggedInFingerprintResponse.from_json_dict(await self.fetch("get_logged_in_fingerprint", {})) @@ -152,39 +156,35 @@ async def delete_all_keys(self) -> None: await self.fetch("delete_all_keys", {}) # Wallet Node APIs - async def get_sync_status(self) -> bool: - response = await self.fetch("get_sync_status", {}) - # TODO: casting due to lack of type checked deserialization - return cast(bool, response["syncing"]) + async def set_wallet_resync_on_startup(self, request: SetWalletResyncOnStartup) -> None: + await self.fetch("set_wallet_resync_on_startup", request.to_json_dict()) - async def get_synced(self) -> bool: - response = await self.fetch("get_sync_status", {}) - # TODO: casting due to lack of type checked deserialization - return cast(bool, response["synced"]) + async def get_sync_status(self) -> GetSyncStatusResponse: + return GetSyncStatusResponse.from_json_dict(await self.fetch("get_sync_status", {})) - async def get_height_info(self) -> uint32: - response = await self.fetch("get_height_info", {}) - # TODO: casting due to lack of type checked deserialization - return cast(uint32, response["height"]) + async def get_height_info(self) -> GetHeightInfoResponse: + return GetHeightInfoResponse.from_json_dict(await self.fetch("get_height_info", {})) - async def push_tx(self, spend_bundle: WalletSpendBundle) -> Dict[str, Any]: - return await self.fetch("push_tx", {"spend_bundle": bytes(spend_bundle).hex()}) + async def push_tx(self, request: PushTX) -> None: + await self.fetch("push_tx", request.to_json_dict()) async def push_transactions( - self, txs: List[TransactionRecord], fee: uint64 = uint64(0), sign: bool = False - ) -> Dict[str, Any]: - transactions = [bytes(tx).hex() for tx in txs] - - return await self.fetch("push_transactions", {"transactions": transactions, "fee": fee, "sign": sign}) - - async def farm_block(self, address: str) -> Dict[str, Any]: - return await self.fetch("farm_block", {"address": address}) + self, + request: PushTransactions, + tx_config: TXConfig, + extra_conditions: Tuple[Condition, ...] 
= tuple(), + timelock_info: ConditionValidTimes = ConditionValidTimes(), + ) -> PushTransactionsResponse: + return PushTransactionsResponse.from_json_dict( + await self.fetch( + "push_transactions", request.json_serialize_for_transport(tx_config, extra_conditions, timelock_info) + ) + ) - async def get_timestamp_for_height(self, height: uint32) -> uint64: - request = {"height": height} - response = await self.fetch("get_timestamp_for_height", request) - # TODO: casting due to lack of type checked deserialization - return cast(uint64, response["timestamp"]) + async def get_timestamp_for_height(self, request: GetTimestampForHeight) -> GetTimestampForHeightResponse: + return GetTimestampForHeightResponse.from_json_dict( + await self.fetch("get_timestamp_for_height", request.to_json_dict()) + ) async def set_auto_claim(self, request: AutoClaimSettings) -> AutoClaimSettings: return AutoClaimSettings.from_json_dict(await self.fetch("set_auto_claim", {**request.to_json_dict()})) From b353a398fef5d04e0da45de8310e322aaddd6cef Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 3 Oct 2024 16:32:02 -0400 Subject: [PATCH 22/69] stop asking for zero random bits of data (#18668) --- .../core/data_layer/test_data_layer_util.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/chia/_tests/core/data_layer/test_data_layer_util.py b/chia/_tests/core/data_layer/test_data_layer_util.py index 5d3835fb5eb2..8e43dfec9faf 100644 --- a/chia/_tests/core/data_layer/test_data_layer_util.py +++ b/chia/_tests/core/data_layer/test_data_layer_util.py @@ -159,6 +159,13 @@ def definition(left_hash: bytes32, right_hash: bytes32) -> bytes32: assert definition(left_hash=left_hash, right_hash=right_hash) == reference +def get_random_bytes(length: int, r: Random) -> bytes: + if length == 0: + return b"" + + return r.getrandbits(length * 8).to_bytes(length, "big") + + def test_leaf_hash(seeded_random: Random) -> None: def definition(key: bytes, value: bytes) -> bytes32: return SerializedProgram.to((key, value)).get_tree_hash() @@ -169,12 +176,14 @@ def definition(key: bytes, value: bytes) -> bytes32: length = 0 else: length = seeded_random.randrange(100) - key = seeded_random.getrandbits(length * 8).to_bytes(length, "big") + + key = get_random_bytes(length=length, r=seeded_random) + if cycle in (1, 2): length = 0 else: length = seeded_random.randrange(100) - value = seeded_random.getrandbits(length * 8).to_bytes(length, "big") + value = get_random_bytes(length=length, r=seeded_random) reference = definition(key=key, value=value) data.append((key, value, reference)) @@ -197,7 +206,7 @@ def definition(key: bytes) -> bytes32: length = 0 else: length = seeded_random.randrange(100) - key = seeded_random.getrandbits(length * 8).to_bytes(length, "big") + key = get_random_bytes(length=length, r=seeded_random) reference = definition(key=key) data.append((key, reference)) From 74536ba7813bbf91746ca95ab0065c8bbcb1f608 Mon Sep 17 00:00:00 2001 From: wjblanke Date: Fri, 4 Oct 2024 09:00:29 -0700 Subject: [PATCH 23/69] CHIA-1568: fix object has no attribute code errors (#18665) * fix object has no attribute code errors * fix --- chia/server/ws_connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/chia/server/ws_connection.py b/chia/server/ws_connection.py index 50b527ad4547..7d1e3c3b5b78 100644 --- a/chia/server/ws_connection.py +++ b/chia/server/ws_connection.py @@ -8,7 +8,7 @@ from dataclasses import dataclass, field from typing import Any, Awaitable, Callable, Dict, List, 
Optional, Set, Tuple, Union -from aiohttp import ClientSession, WSCloseCode, WSMessage, WSMsgType +from aiohttp import ClientSession, WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.client import ClientWebSocketResponse from aiohttp.web import WebSocketResponse from packaging.version import Version @@ -708,7 +708,7 @@ async def _read_one_message(self) -> Optional[Message]: return full_message_loaded elif message.type == WSMsgType.ERROR: self.log.error(f"WebSocket Error: {message}") - if message.data.code == WSCloseCode.MESSAGE_TOO_BIG: + if isinstance(message.data, WebSocketError) and message.data.code == WSCloseCode.MESSAGE_TOO_BIG: asyncio.create_task(self.close(300)) else: asyncio.create_task(self.close()) From 2efb374b74893ef5b5c707cf08b55399a430c29b Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 4 Oct 2024 17:00:51 +0100 Subject: [PATCH 24/69] CHIA-1570 Annotate generator_tools_testing.py (#18669) Annotate generator_tools_testing.py. --- chia/_tests/util/generator_tools_testing.py | 2 +- mypy-exclusions.txt | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/chia/_tests/util/generator_tools_testing.py b/chia/_tests/util/generator_tools_testing.py index f06596ed659f..19825b4004c1 100644 --- a/chia/_tests/util/generator_tools_testing.py +++ b/chia/_tests/util/generator_tools_testing.py @@ -19,7 +19,7 @@ def run_and_get_removals_and_additions( *, height: uint32, constants: ConsensusConstants = DEFAULT_CONSTANTS, - mempool_mode=False, + mempool_mode: bool = False, ) -> Tuple[List[bytes32], List[Coin]]: removals: List[bytes32] = [] additions: List[Coin] = [] diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt index 090040cf1778..0c0cc8136bf9 100644 --- a/mypy-exclusions.txt +++ b/mypy-exclusions.txt @@ -86,7 +86,6 @@ chia._tests.pools.test_wallet_pool_store chia._tests.simulation.test_simulation chia._tests.tools.test_run_block chia._tests.util.benchmark_cost -chia._tests.util.generator_tools_testing chia._tests.util.test_full_block_utils chia._tests.util.test_misc chia._tests.util.test_network From 0912c377e71c6ddc36f50c0d73688d503921452a Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Fri, 4 Oct 2024 15:34:12 -0700 Subject: [PATCH 25/69] CHIA-1567: Update aiohttp to 3.10.4 (#18663) Update aiohttp to 3.10.4 --- poetry.lock | 156 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 79 insertions(+), 79 deletions(-) diff --git a/poetry.lock b/poetry.lock index d9fd48e6e08e..f29db50c4547 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,87 +24,87 @@ files = [ [[package]] name = "aiohttp" -version = "3.10.2" +version = "3.10.4" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.8" files = [ - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:95213b3d79c7e387144e9cb7b9d2809092d6ff2c044cb59033aedc612f38fb6d"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1aa005f060aff7124cfadaa2493f00a4e28ed41b232add5869e129a2e395935a"}, - {file = "aiohttp-3.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eabe6bf4c199687592f5de4ccd383945f485779c7ffb62a9b9f1f8a3f9756df8"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96e010736fc16d21125c7e2dc5c350cd43c528b85085c04bf73a77be328fe944"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:99f81f9c1529fd8e03be4a7bd7df32d14b4f856e90ef6e9cbad3415dbfa9166c"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d611d1a01c25277bcdea06879afbc11472e33ce842322496b211319aa95441bb"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00191d38156e09e8c81ef3d75c0d70d4f209b8381e71622165f22ef7da6f101"}, - {file = "aiohttp-3.10.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74c091a5ded6cb81785de2d7a8ab703731f26de910dbe0f3934eabef4ae417cc"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:18186a80ec5a701816adbf1d779926e1069392cf18504528d6e52e14b5920525"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5a7ceb2a0d2280f23a02c64cd0afdc922079bb950400c3dd13a1ab2988428aac"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8bd7be6ff6c162a60cb8fce65ee879a684fbb63d5466aba3fa5b9288eb04aefa"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:fae962b62944eaebff4f4fddcf1a69de919e7b967136a318533d82d93c3c6bd1"}, - {file = "aiohttp-3.10.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a0fde16d284efcacbe15fb0c1013f0967b6c3e379649239d783868230bf1db42"}, - {file = "aiohttp-3.10.2-cp310-cp310-win32.whl", hash = "sha256:f81cd85a0e76ec7b8e2b6636fe02952d35befda4196b8c88f3cec5b4fb512839"}, - {file = "aiohttp-3.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:54ba10eb5a3481c28282eb6afb5f709aedf53cf9c3a31875ffbdc9fc719ffd67"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:87fab7f948e407444c2f57088286e00e2ed0003ceaf3d8f8cc0f60544ba61d91"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ec6ad66ed660d46503243cbec7b2b3d8ddfa020f984209b3b8ef7d98ce69c3f2"}, - {file = "aiohttp-3.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a4be88807283bd96ae7b8e401abde4ca0bab597ba73b5e9a2d98f36d451e9aac"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01c98041f90927c2cbd72c22a164bb816fa3010a047d264969cf82e1d4bcf8d1"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54e36c67e1a9273ecafab18d6693da0fb5ac48fd48417e4548ac24a918c20998"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7de3ddb6f424af54535424082a1b5d1ae8caf8256ebd445be68c31c662354720"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7dd9c7db94b4692b827ce51dcee597d61a0e4f4661162424faf65106775b40e7"}, - {file = "aiohttp-3.10.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e57e21e1167705f8482ca29cc5d02702208d8bf4aff58f766d94bcd6ead838cd"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a1a50e59b720060c29e2951fd9f13c01e1ea9492e5a527b92cfe04dd64453c16"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:686c87782481fda5ee6ba572d912a5c26d9f98cc5c243ebd03f95222af3f1b0f"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:dafb4abb257c0ed56dc36f4e928a7341b34b1379bd87e5a15ce5d883c2c90574"}, - {file = "aiohttp-3.10.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:494a6f77560e02bd7d1ab579fdf8192390567fc96a603f21370f6e63690b7f3d"}, - {file = 
"aiohttp-3.10.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6fe8503b1b917508cc68bf44dae28823ac05e9f091021e0c41f806ebbb23f92f"}, - {file = "aiohttp-3.10.2-cp311-cp311-win32.whl", hash = "sha256:4ddb43d06ce786221c0dfd3c91b4892c318eaa36b903f7c4278e7e2fa0dd5102"}, - {file = "aiohttp-3.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:ca2f5abcb0a9a47e56bac173c01e9f6c6e7f27534d91451c5f22e6a35a5a2093"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:14eb6b17f6246959fb0b035d4f4ae52caa870c4edfb6170aad14c0de5bfbf478"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:465e445ec348d4e4bd349edd8b22db75f025da9d7b6dc1369c48e7935b85581e"}, - {file = "aiohttp-3.10.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:341f8ece0276a828d95b70cd265d20e257f5132b46bf77d759d7f4e0443f2906"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01fbb87b5426381cd9418b3ddcf4fc107e296fa2d3446c18ce6c76642f340a3"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c474af073e1a6763e1c5522bbb2d85ff8318197e4c6c919b8d7886e16213345"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d9076810a5621236e29b2204e67a68e1fe317c8727ee4c9abbfbb1083b442c38"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f515d6859e673940e08de3922b9c4a2249653b0ac181169313bd6e4b1978ac"}, - {file = "aiohttp-3.10.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:655e583afc639bef06f3b2446972c1726007a21003cd0ef57116a123e44601bc"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8da9449a575133828cc99985536552ea2dcd690e848f9d41b48d8853a149a959"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19073d57d0feb1865d12361e2a1f5a49cb764bf81a4024a3b608ab521568093a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c8e98e1845805f184d91fda6f9ab93d7c7b0dddf1c07e0255924bfdb151a8d05"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:377220a5efde6f9497c5b74649b8c261d3cce8a84cb661be2ed8099a2196400a"}, - {file = "aiohttp-3.10.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:92f7f4a4dc9cdb5980973a74d43cdbb16286dacf8d1896b6c3023b8ba8436f8e"}, - {file = "aiohttp-3.10.2-cp312-cp312-win32.whl", hash = "sha256:9bb2834a6f11d65374ce97d366d6311a9155ef92c4f0cee543b2155d06dc921f"}, - {file = "aiohttp-3.10.2-cp312-cp312-win_amd64.whl", hash = "sha256:518dc3cb37365255708283d1c1c54485bbacccd84f0a0fb87ed8917ba45eda5b"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:7f98e70bbbf693086efe4b86d381efad8edac040b8ad02821453083d15ec315f"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9f6f0b252a009e98fe84028a4ec48396a948e7a65b8be06ccfc6ef68cf1f614d"}, - {file = "aiohttp-3.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9360e3ffc7b23565600e729e8c639c3c50d5520e05fdf94aa2bd859eef12c407"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3988044d1635c7821dd44f0edfbe47e9875427464e59d548aece447f8c22800a"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30a9d59da1543a6f1478c3436fd49ec59be3868bca561a33778b4391005e499d"}, - {file = 
"aiohttp-3.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9f49bdb94809ac56e09a310a62f33e5f22973d6fd351aac72a39cd551e98194"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfd2dca3f11c365d6857a07e7d12985afc59798458a2fdb2ffa4a0332a3fd43"}, - {file = "aiohttp-3.10.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:685c1508ec97b2cd3e120bfe309a4ff8e852e8a7460f1ef1de00c2c0ed01e33c"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:49904f38667c44c041a0b44c474b3ae36948d16a0398a8f8cd84e2bb3c42a069"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:352f3a4e5f11f3241a49b6a48bc5b935fabc35d1165fa0d87f3ca99c1fcca98b"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:fc61f39b534c5d5903490478a0dd349df397d2284a939aa3cbaa2fb7a19b8397"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:ad2274e707be37420d0b6c3d26a8115295fe9d8e6e530fa6a42487a8ca3ad052"}, - {file = "aiohttp-3.10.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c836bf3c7512100219fe1123743fd8dd9a2b50dd7cfb0c3bb10d041309acab4b"}, - {file = "aiohttp-3.10.2-cp38-cp38-win32.whl", hash = "sha256:53e8898adda402be03ff164b0878abe2d884e3ea03a4701e6ad55399d84b92dc"}, - {file = "aiohttp-3.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:7cc8f65f5b22304693de05a245b6736b14cb5bc9c8a03da6e2ae9ef15f8b458f"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9dfc906d656e14004c5bc672399c1cccc10db38df2b62a13fb2b6e165a81c316"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:91b10208b222ddf655c3a3d5b727879d7163db12b634492df41a9182a76edaae"}, - {file = "aiohttp-3.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fd16b5e1a7bdd14668cd6bde60a2a29b49147a535c74f50d8177d11b38433a7"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2bfdda4971bd79201f59adbad24ec2728875237e1c83bba5221284dbbf57bda"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69d73f869cf29e8a373127fc378014e2b17bcfbe8d89134bc6fb06a2f67f3cb3"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df59f8486507c421c0620a2c3dce81fbf1d54018dc20ff4fecdb2c106d6e6abc"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df930015db36b460aa9badbf35eccbc383f00d52d4b6f3de2ccb57d064a6ade"}, - {file = "aiohttp-3.10.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:562b1153ab7f766ee6b8b357ec777a302770ad017cf18505d34f1c088fccc448"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d984db6d855de58e0fde1ef908d48fe9a634cadb3cf715962722b4da1c40619d"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:14dc3fcb0d877911d775d511eb617a486a8c48afca0a887276e63db04d3ee920"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b52a27a5c97275e254704e1049f4b96a81e67d6205f52fa37a4777d55b0e98ef"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:cd33d9de8cfd006a0d0fe85f49b4183c57e91d18ffb7e9004ce855e81928f704"}, - {file = "aiohttp-3.10.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1238fc979160bc03a92fff9ad021375ff1c8799c6aacb0d8ea1b357ea40932bb"}, 
- {file = "aiohttp-3.10.2-cp39-cp39-win32.whl", hash = "sha256:e2f43d238eae4f0b04f58d4c0df4615697d4ca3e9f9b1963d49555a94f0f5a04"}, - {file = "aiohttp-3.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:947847f07a8f81d7b39b2d0202fd73e61962ebe17ac2d8566f260679e467da7b"}, - {file = "aiohttp-3.10.2.tar.gz", hash = "sha256:4d1f694b5d6e459352e5e925a42e05bac66655bfde44d81c59992463d2897014"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:81037ddda8cc0a95c6d8c1b9029d0b19a62db8770c0e239e3bea0109d294ab66"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:71944d4f4090afc07ce96b7029d5a574240e2f39570450df4af0d5b93a5ee64a"}, + {file = "aiohttp-3.10.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c774f08afecc0a617966f45a9c378456e713a999ee60654d9727617def3e4ee4"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc990e73613c78ab2930b60266135066f37fdfce6b32dd604f42c5c377ee880a"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6acd1a908740f708358d240f9a3243cec31a456e3ded65c2cb46f6043bc6735"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6075e27e7e54fbcd1c129c5699b2d251c885c9892e26d59a0fb7705141c2d14b"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc98d93d11d860ac823beb6131f292d82efb76f226b5e28a3eab1ec578dfd041"}, + {file = "aiohttp-3.10.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:201ddf1471567568be381b6d4701e266a768f7eaa2f99ef753f2c9c5e1e3fb5c"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7d202ec55e61f06b1a1eaf317fba7546855cbf803c13ce7625d462fb8c88e238"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:96b2e7c110a941c8c1a692703b8ac1013e47f17ee03356c71d55c0a54de2ce38"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8ba0fbc56c44883bd757ece433f9caadbca67f565934afe9bc53ba3bd99cc368"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46cc9069da466652bb7b8b3fac1f8ce2e12a9dc0fb11551faa420c4cdbc60abf"}, + {file = "aiohttp-3.10.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a19cd1e9dc703257fda78b8e889c3a08eabaa09f6ff0d867850b03964f80d1"}, + {file = "aiohttp-3.10.4-cp310-cp310-win32.whl", hash = "sha256:8593040bcc8075fc0e817a602bc5d3d74c7bd717619ffc175a8ba0188edebadf"}, + {file = "aiohttp-3.10.4-cp310-cp310-win_amd64.whl", hash = "sha256:326fb5228aadfc395981d9b336d56a698da335897c4143105c73b583d7500839"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dfe48f477e02ef5ab247c6ac431a6109c69b5c24cb3ccbcd3e27c4fb39691fe4"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6fe78b51852e25d4e20be51ef88c2a0bf31432b9f2223bdbd61c01a0f9253a7"}, + {file = "aiohttp-3.10.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5cc75ff5efbd92301e63a157fddb18a6964a3f40e31c77d57e97dbb9bb3373b4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dca39391f45fbb28daa6412f98c625265bf6b512cc41382df61672d1b242f8f4"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8616dd5ed8b3b4029021b560305041c62e080bb28f238c27c2e150abe3539587"}, + {file = 
"aiohttp-3.10.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9d7958ba22854b3f00a7bbb66cde1dc759760ce8a3e6dfe9ea53f06bccaa9aa2"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a24ac7164a824ef2e8e4e9a9f6debb1f43c44ad7ad04efc6018a6610555666d"}, + {file = "aiohttp-3.10.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:660ad010b8fd0b26e8edb8ae5c036db5b16baac4278198ad238b11956d920b3d"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:93ee83008d3e505db9846a5a1f48a002676d8dcc90ee431a9462541c9b81393c"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:77071795efd6ba87f409001141fb05c94ee962b9fca6c8fa1f735c2718512de4"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:ff371ae72a1816c3eeba5c9cff42cb739aaa293fec7d78f180d1c7ee342285b6"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c253e81f12da97f85d45441e8c6da0d9c12e07db4a7136b0a955df6fc5e4bf51"}, + {file = "aiohttp-3.10.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ce101c447cf7ba4b6e5ab07bfa2c0da21cbab66922f78a601f0b84fd7710d72"}, + {file = "aiohttp-3.10.4-cp311-cp311-win32.whl", hash = "sha256:705c311ecf2d30fbcf3570d1a037c657be99095694223488140c47dee4ef2460"}, + {file = "aiohttp-3.10.4-cp311-cp311-win_amd64.whl", hash = "sha256:ebddbfea8a8d6b97f717658fa85a96681a28990072710d3de3a4eba5d6804a37"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:fe4d63f42d9c604521b208b754abfafe01218af4a8f6332b43196ee8fe88bbd5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fef7b7bd3a6911b4d148332136d34d3c2aee3d54d354373b1da6d96bc08089a5"}, + {file = "aiohttp-3.10.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fff8606149098935188fe1e135f7e7991e6a36d6fe394fd15939fc57d0aff889"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9eb3df1aa83602be9a5e572c834d74c3c8e382208b59a873aabfe4c493c45ed0"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c4a71d4a5e0cbfd4bfadd13cb84fe2bc76c64d550dc4f22c22008c9354cffb3"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf61884a604c399458c4a42c8caea000fbcc44255ed89577ff50cb688a0fe8e2"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2015e4b40bd5dedc8155c2b2d24a2b07963ae02b5772373d0b599a68e38a316b"}, + {file = "aiohttp-3.10.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b06e1a66bf0a1a2d0f12aef25843dfd2093df080d6c1acbc43914bb9c8f36ed3"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:eb898c9ad5a1228a669ebe2e2ba3d76aebe1f7c10b78f09a36000254f049fc2b"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2d64a5a7539320c3cecb4bca093ea825fcc906f8461cf8b42a7bf3c706ce1932"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:438c6e1492d060b21285f4b6675b941cf96dd9ef3dfdd59940561029b82e3e1f"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e99bf118afb2584848dba169a685fe092b338a4fe52ae08c7243d7bc4cc204fe"}, + {file = "aiohttp-3.10.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:9dc26781fb95225c6170619dece8b5c6ca7cfb1b0be97b7ee719915773d0c2a9"}, + {file = "aiohttp-3.10.4-cp312-cp312-win32.whl", hash = "sha256:45bb655cb8b3a61e19977183a4e0962051ae90f6d46588ed4addb8232128141c"}, + {file = "aiohttp-3.10.4-cp312-cp312-win_amd64.whl", hash = "sha256:347bbdc48411badc24fe3a13565820bc742db3aa2f9127cd5f48c256caf87e29"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4ad284cee0fdcdc0216346b849fd53d201b510aff3c48aa3622daec9ada4bf80"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:58df59234be7d7e80548b9482ebfeafdda21948c25cb2873c7f23870c8053dfe"}, + {file = "aiohttp-3.10.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5f52225af7f91f27b633f73473e9ef0aa8e2112d57b69eaf3aa4479e3ea3bc0e"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93f1a0e12c321d923c024b56d7dcd8012e60bf30a4b3fb69a88be15dcb9ab80b"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9e9e9a51dd12f2f71fdbd7f7230dcb75ed8f77d8ac8e07c73b599b6d7027e5c"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:38bb515f1affc36d3d97b02bf82099925a5785c4a96066ff4400a83ad09d3d5d"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e685afb0e3b7b861d89cb3690d89eeda221b43095352efddaaa735c6baf87f3"}, + {file = "aiohttp-3.10.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd5673e3391564871ba6753cf674dcf2051ef19dc508998fe0758a6c7b429a0"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4b34e5086e1ead3baa740e32adf35cc5e42338e44c4b07f7b62b41ca6d6a5bfd"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:c3fd3b8f0164fb2866400cd6eb9e884ab0dc95f882cf8b25e560ace7350c552d"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:b95e1694d234f27b4bbf5bdef56bb751974ac5dbe045b1e462bde1fe39421cbe"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:c031de4dfabe7bb6565743745ab43d20588944ddfc7233360169cab4008eee2f"}, + {file = "aiohttp-3.10.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:03c5a3143d4a82c43a3d82ac77d9cdef527a72f1c04dcca7b14770879f33d196"}, + {file = "aiohttp-3.10.4-cp38-cp38-win32.whl", hash = "sha256:b71722b527445e02168e2d1cf435772731874671a647fa159ad000feea7933b6"}, + {file = "aiohttp-3.10.4-cp38-cp38-win_amd64.whl", hash = "sha256:0fd1f57aac7d01c9c768675d531976d20d5b79d9da67fac87e55d41b4ade05f9"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:15b36a644d1f44ea3d94a0bbb71e75d5f394a3135dc388a209466e22b711ce64"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394ddf9d216cf0bd429b223239a0ab628f01a7a1799c93ce4685eedcdd51b9bc"}, + {file = "aiohttp-3.10.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dd33f4d571b4143fc9318c3d9256423579c7d183635acc458a6db81919ae5204"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5991b80886655e6c785aadf3114d4f86e6bec2da436e2bb62892b9f048450a4"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92021bf0a4b9ad16851a6c1ca3c86e5b09aecca4f7a2576430c6bbf3114922b1"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:938e37fd337343c67471098736deb33066d72cec7d8927b9c1b6b4ea807ade9e"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d697023b16c62f9aeb3ffdfb8ec4ac3afd477388993b9164b47dadbd60e7062"}, + {file = "aiohttp-3.10.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2f9f07fe6d0d51bd2a788cbb339f1570fd691449c53b5dec83ff838f117703e"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:50ac670f3fc13ce95e4d6d5a299db9288cc84c663aa630142444ef504756fcf7"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:9bcdd19398212785a9cb82a63a4b75a299998343f3f5732dfd37c1a4275463f9"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:122c26f0976225aba46f381e3cabb5ef89a08af6503fc30493fb732e578cfa55"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:d0665e2a346b6b66959f831ffffd8aa71dd07dd2300017d478f5b47573e66cfe"}, + {file = "aiohttp-3.10.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:625a4a9d4b9f80e7bbaaf2ace06341cf701b2fee54232843addf0bb7304597fb"}, + {file = "aiohttp-3.10.4-cp39-cp39-win32.whl", hash = "sha256:5115490112f39f16ae87c1b34dff3e2c95306cf456b1d2af5974c4ac7d2d1ec7"}, + {file = "aiohttp-3.10.4-cp39-cp39-win_amd64.whl", hash = "sha256:9b58b2ef7f28a2462ba86acbf3b20371bd80a1faa1cfd82f31968af4ac81ef25"}, + {file = "aiohttp-3.10.4.tar.gz", hash = "sha256:23a5f97e7dd22e181967fb6cb6c3b11653b0fdbbc4bb7739d9b6052890ccab96"}, ] [package.dependencies] @@ -3457,4 +3457,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "dace0d72adc211850f2bcf7c41177908240702ee7aedb45683ccd5610ed12e7e" +content-hash = "47fce9cd8892399d3803fe73442a8875d79b2439e9462aaab6cf659cf16b2e2f" diff --git a/pyproject.toml b/pyproject.toml index 44836732f06b..f8bd6b1b1655 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -39,7 +39,7 @@ priority = "supplemental" [tool.poetry.dependencies] python = ">=3.8.10, <3.13" aiofiles = "24.1.0" # Async IO for files -aiohttp = "3.10.2" # HTTP server for full node rpc +aiohttp = "3.10.4" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks anyio = "4.3.0" bitstring = "4.1.4" # Binary data management library From b5987621d0e06087cbfc73fef40a61e41da39f2f Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Fri, 4 Oct 2024 15:34:26 -0700 Subject: [PATCH 26/69] Update GUI to 7e6f90e (#18671) Update chia-blockchain-gui to 7e6f90e --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 71bae1c07294..7e6f90ed1bbd 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 71bae1c072941217cde26a70e864d9add1de07f9 +Subproject commit 7e6f90ed1bbda7dcc9181a7b424a82e4fb473901 From a8103844487f4c6ea064599f329f6c8fd9c5d21b Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 7 Oct 2024 19:08:44 +0100 Subject: [PATCH 27/69] CHIA-1564 Simplify WalletSpendBundle class by leveraging some parent class methods (#18666) Simplify WalletSpendBundle class by leveraging some parent class methods. 
--- chia/wallet/wallet_spend_bundle.py | 17 +---------------- 1 file changed, 1 insertion(+), 16 deletions(-) diff --git a/chia/wallet/wallet_spend_bundle.py b/chia/wallet/wallet_spend_bundle.py index 68696baf5f79..44eca406b3e7 100644 --- a/chia/wallet/wallet_spend_bundle.py +++ b/chia/wallet/wallet_spend_bundle.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Tuple +from typing import List from chia_rs import AugSchemeMPL, G2Element @@ -12,21 +12,6 @@ class WalletSpendBundle(SpendBundle): - @classmethod - def from_bytes(cls, bytes: bytes) -> WalletSpendBundle: - sb = SpendBundle.from_bytes(bytes) - return cls(sb.coin_spends, sb.aggregated_signature) - - @classmethod - def parse_rust(cls, blob: bytes, flag: bool = False) -> Tuple[WalletSpendBundle, int]: - bundle, advance = super(WalletSpendBundle, WalletSpendBundle).parse_rust(blob) - return cls(bundle.coin_spends, bundle.aggregated_signature), advance - - @classmethod - def from_json_dict(cls, json_dict: Dict[str, Any]) -> WalletSpendBundle: - sb = SpendBundle.from_json_dict(json_dict) - return cls(sb.coin_spends, sb.aggregated_signature) - @classmethod def aggregate(cls, spend_bundles: List[T_SpendBundle]) -> WalletSpendBundle: coin_spends: List[CoinSpend] = [] From e7241ebfe03aba74c618b35c0b28749f78621dcd Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Mon, 7 Oct 2024 20:09:15 +0200 Subject: [PATCH 28/69] simplify passing around difficulty, sub slot iterators and prev_ses_block (#18660) * combine current sub slot iterations, current difficulty and previous ses block into ChainState, to make it easier to pass around and update * address review comments * address review comments. rename ChainState -> ValidationState. add more comments --- .../blockchain/blockchain_test_utils.py | 5 +- chia/_tests/blockchain/test_blockchain.py | 49 +++-------- chia/_tests/core/full_node/test_full_node.py | 9 +- .../test_third_party_harvesters.py | 5 +- chia/_tests/util/full_sync.py | 5 +- chia/_tests/util/misc.py | 10 +-- chia/_tests/wallet/sync/test_wallet_sync.py | 7 +- chia/consensus/multiprocess_validation.py | 31 +++---- chia/full_node/full_node.py | 84 +++++++++---------- chia/simulator/full_node_simulator.py | 11 +-- chia/types/validation_state.py | 14 ++++ tools/test_full_sync.py | 9 +- 12 files changed, 112 insertions(+), 127 deletions(-) create mode 100644 chia/types/validation_state.py diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py index c4032374eed6..db59b79224be 100644 --- a/chia/_tests/blockchain/blockchain_test_utils.py +++ b/chia/_tests/blockchain/blockchain_test_utils.py @@ -9,6 +9,7 @@ from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing from chia.types.full_block import FullBlock +from chia.types.validation_state import ValidationState from chia.util.errors import Err from chia.util.ints import uint32, uint64 @@ -81,9 +82,7 @@ async def _validate_and_add_block( [block], blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=prev_ses_block, + ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) assert pre_validation_results is not None diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index a6d65d8fc92b..5a734a12e285 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ 
b/chia/_tests/blockchain/test_blockchain.py @@ -49,6 +49,7 @@ from chia.types.generator_types import BlockGenerator from chia.types.spend_bundle import SpendBundle from chia.types.unfinished_block import UnfinishedBlock +from chia.types.validation_state import ValidationState from chia.util.cpu import available_logical_cores from chia.util.errors import Err from chia.util.generator_tools import get_block_header @@ -1825,9 +1826,7 @@ async def test_pre_validation_fails_bad_blocks(self, empty_blockchain: Blockchai [blocks[0], block_bad], empty_blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=difficulty, - prev_ses_block=None, + ValidationState(ssi, difficulty, None), validate_signatures=True, ) assert res[0].error is None @@ -1854,9 +1853,7 @@ async def test_pre_validation( blocks_to_validate, empty_blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=difficulty, - prev_ses_block=None, + ValidationState(ssi, difficulty, None), validate_signatures=True, ) end_pv = time.time() @@ -1962,9 +1959,7 @@ async def test_conditions( [blocks[-1]], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) # Ignore errors from pre-validation, we are testing block_body_validation @@ -2086,9 +2081,7 @@ async def test_timelock_conditions( [blocks[-1]], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=True, ) assert pre_validation_results is not None @@ -2167,9 +2160,7 @@ async def test_aggsig_garbage( [blocks[-1]], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) # Ignore errors from pre-validation, we are testing block_body_validation @@ -2293,9 +2284,7 @@ async def test_ephemeral_timelock( [blocks[-1]], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=True, ) assert pre_validation_results is not None @@ -2654,9 +2643,7 @@ async def test_cost_exceeds_max( [blocks[-1]], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) assert results is not None @@ -3233,9 +3220,7 @@ async def test_invalid_agg_sig(self, empty_blockchain: Blockchain, bt: BlockTool [last_block], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=True, ) assert preval_results is not None @@ -3351,9 +3336,7 @@ async def test_long_reorg( blocks, b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) for i, block in enumerate(blocks): @@ -3912,9 +3895,7 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> [block1], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) _, err, _ = await b.add_block(block1, preval[0], None, sub_slot_iters=ssi) @@ -3925,9 +3906,7 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> [block2], b.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) _, err, _ = await b.add_block(block2, preval[0], None, sub_slot_iters=ssi) @@ -3960,9 +3939,7 @@ async def test_get_tx_peak(default_400_blocks: 
List[FullBlock], empty_blockchain test_blocks, bc.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 594827efbc23..7440df15d28a 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -60,6 +60,7 @@ from chia.types.peer_info import PeerInfo, TimestampedPeerInfo from chia.types.spend_bundle import SpendBundle, estimate_fees from chia.types.unfinished_block import UnfinishedBlock +from chia.types.validation_state import ValidationState from chia.util.errors import ConsensusError, Err from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64, uint128 @@ -432,9 +433,7 @@ async def check_transaction_confirmed(transaction) -> bool: all_blocks[:i], blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) assert results is not None @@ -451,9 +450,7 @@ async def check_transaction_confirmed(transaction) -> bool: all_blocks[:i], blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=False, ) assert results is not None diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py index 28847dc61f44..f1de2794668f 100644 --- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py +++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py @@ -39,6 +39,7 @@ from chia.types.blockchain_format.slots import ChallengeChainSubSlot, RewardChainSubSlot from chia.types.full_block import FullBlock from chia.types.peer_info import UnresolvedPeerInfo +from chia.types.validation_state import ValidationState from chia.util.bech32m import decode_puzzle_hash from chia.util.hash import std_hash from chia.util.ints import uint8, uint32, uint64 @@ -441,9 +442,7 @@ async def add_test_blocks_into_full_node(blocks: List[FullBlock], full_node: Ful blocks, full_node.blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=prev_ses_block, + ValidationState(ssi, diff, prev_ses_block), validate_signatures=True, ) assert pre_validation_results is not None and len(pre_validation_results) == len(blocks) diff --git a/chia/_tests/util/full_sync.py b/chia/_tests/util/full_sync.py index 1ea532fbf7b4..faad229a8529 100644 --- a/chia/_tests/util/full_sync.py +++ b/chia/_tests/util/full_sync.py @@ -25,6 +25,7 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo +from chia.types.validation_state import ValidationState from chia.util.config import load_config from chia.util.ints import uint16 @@ -206,8 +207,8 @@ async def run_sync_test( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) - success, summary, _, _, _, _ = await full_node.add_block_batch( - block_batch, peer_info, None, current_ssi=ssi, current_difficulty=diff + success, summary, err = await full_node.add_block_batch( + block_batch, peer_info, None, ValidationState(ssi, diff, None) ) end_height = block_batch[-1].height full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE) diff --git a/chia/_tests/util/misc.py 
b/chia/_tests/util/misc.py index 82728a025631..3b2bd95fb478 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -62,6 +62,7 @@ from chia.types.condition_opcodes import ConditionOpcode from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo +from chia.types.validation_state import ValidationState from chia.util.batches import to_batches from chia.util.hash import std_hash from chia.util.ints import uint16, uint32, uint64 @@ -705,18 +706,17 @@ async def add_blocks_in_batches( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) - prev_ses_block = None + vs = ValidationState(ssi, diff, None) for block_batch in to_batches(blocks, 64): b = block_batch.entries[0] if (b.height % 128) == 0: print(f"main chain: {b.height:4} weight: {b.weight}") - success, _, ssi, diff, prev_ses_block, err = await full_node.add_block_batch( + # vs is updated by the call to add_block_batch() + success, _, err = await full_node.add_block_batch( block_batch.entries, PeerInfo("0.0.0.0", 0), None, - current_ssi=ssi, - current_difficulty=diff, - prev_ses_block=prev_ses_block, + vs, ) assert err is None assert success is True diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index af12ae6ff6c4..db8848b674f7 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -40,6 +40,7 @@ from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo +from chia.types.validation_state import ValidationState from chia.util.hash import std_hash from chia.util.ints import uint32, uint64, uint128 from chia.wallet.nft_wallet.nft_wallet import NFTWallet @@ -362,8 +363,7 @@ async def test_long_sync_wallet( blocks_reorg[-num_blocks - 10 : -1], PeerInfo("0.0.0.0", 0), None, - current_ssi=sub_slot_iters, - current_difficulty=difficulty, + ValidationState(sub_slot_iters, difficulty, None), ) await full_node.add_block(blocks_reorg[-1]) @@ -482,8 +482,7 @@ async def test_wallet_reorg_get_coinbase( blocks_reorg_2[-44:], PeerInfo("0.0.0.0", 0), None, - current_ssi=sub_slot_iters, - current_difficulty=difficulty, + ValidationState(sub_slot_iters, difficulty, None), ) for wallet_node, wallet_server in wallets: diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 76591ceace6c..6a434b1eef8c 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -1,12 +1,13 @@ from __future__ import annotations import asyncio +import copy import logging import time import traceback from concurrent.futures import Executor from dataclasses import dataclass -from typing import Dict, List, Optional, Sequence, Tuple +from typing import Dict, List, Optional, Sequence from chia_rs import AugSchemeMPL, SpendBundleConditions @@ -27,6 +28,7 @@ from chia.types.full_block import FullBlock from chia.types.generator_types import BlockGenerator from chia.types.unfinished_block import UnfinishedBlock +from chia.types.validation_state import ValidationState from chia.util.augmented_chain import AugmentedBlockchain from chia.util.block_cache import BlockCache from chia.util.condition_tools import pkm_pairs @@ -165,10 +167,8 @@ async def pre_validate_blocks_multiprocessing( blocks: Sequence[FullBlock], pool: Executor, block_height_conds_map: Dict[uint32, SpendBundleConditions], + vs: 
ValidationState, *, - sub_slot_iters: uint64, - difficulty: uint64, - prev_ses_block: Optional[BlockRecord], wp_summaries: Optional[List[SubEpochSummary]] = None, validate_signatures: bool = True, ) -> List[PreValidationResult]: @@ -219,15 +219,15 @@ async def pre_validate_blocks_multiprocessing( # they won't actually be added to the underlying blockchain object blockchain = AugmentedBlockchain(block_records) - diff_ssis: List[Tuple[uint64, uint64]] = [] + diff_ssis: List[ValidationState] = [] prev_ses_block_list: List[Optional[BlockRecord]] = [] for block in blocks: if len(block.finished_sub_slots) > 0: if block.finished_sub_slots[0].challenge_chain.new_difficulty is not None: - difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty + vs.current_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: - sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters + vs.current_ssi = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index) challenge = get_block_challenge(constants, block, BlockCache(recent_blocks), prev_b is None, overflow, False) if block.reward_chain_block.challenge_chain_sp_vdf is None: @@ -244,7 +244,7 @@ async def pre_validate_blocks_multiprocessing( constants.DIFFICULTY_CONSTANT_FACTOR, q_str, block.reward_chain_block.proof_of_space.size, - difficulty, + vs.current_difficulty, cc_sp_hash, ) @@ -254,10 +254,11 @@ async def pre_validate_blocks_multiprocessing( blockchain, required_iters, block, - sub_slot_iters=sub_slot_iters, - prev_ses_block=prev_ses_block, + sub_slot_iters=vs.current_ssi, + prev_ses_block=vs.prev_ses_block, ) except ValueError: + log.exception("block_to_block_record()") return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] if block_rec.sub_epoch_summary_included is not None and wp_summaries is not None: @@ -269,10 +270,10 @@ async def pre_validate_blocks_multiprocessing( recent_blocks[block_rec.header_hash] = block_rec blockchain.add_extra_block(block, block_rec) # Temporarily add block to chain prev_b = block_rec - diff_ssis.append((difficulty, sub_slot_iters)) - prev_ses_block_list.append(prev_ses_block) + diff_ssis.append(copy.copy(vs)) + prev_ses_block_list.append(vs.prev_ses_block) if block_rec.sub_epoch_summary_included is not None: - prev_ses_block = block_rec + vs.prev_ses_block = block_rec conditions_pickled = {} for k, v in block_height_conds_map.items(): @@ -322,8 +323,8 @@ async def pre_validate_blocks_multiprocessing( b_pickled, previous_generators, conditions_pickled, - [diff_ssis[j][0] for j in range(i, end_i)], - [diff_ssis[j][1] for j in range(i, end_i)], + [diff_ssis[j].current_difficulty for j in range(i, end_i)], + [diff_ssis[j].current_ssi for j in range(i, end_i)], validate_signatures, ses_blocks_bytes_list, ) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index b4068fea8956..aed5683db503 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2,6 +2,7 @@ import asyncio import contextlib +import copy import dataclasses import logging import multiprocessing @@ -84,6 +85,7 @@ from chia.types.spend_bundle import SpendBundle from chia.types.transaction_queue_entry import TransactionQueueEntry from chia.types.unfinished_block import UnfinishedBlock +from chia.types.validation_state import ValidationState from 
chia.types.weight_proof import WeightProof from chia.util.bech32m import encode_puzzle_hash from chia.util.check_fork_next_block import check_fork_next_block @@ -607,8 +609,9 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t ssi, diff = get_next_sub_slot_iters_and_difficulty( self.constants, new_slot, prev_b, self.blockchain ) - success, state_change_summary, ssi, diff, _, _ = await self.add_block_batch( - response.blocks, peer_info, None, ssi, diff + vs = ValidationState(ssi, diff, None) + success, state_change_summary, err = await self.add_block_batch( + response.blocks, peer_info, None, vs ) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") @@ -1085,6 +1088,18 @@ async def sync_from_fork_point( # normally "fork_point" or "fork_height" refers to the first common # block between the main chain and the fork. Here "fork_point_height" # seems to refer to the first diverging block + fork_info: Optional[ForkInfo] = None + if fork_point_height == 0: + ssi = self.constants.SUB_SLOT_ITERS_STARTING + diff = self.constants.DIFFICULTY_STARTING + prev_ses_block = None + else: + prev_b_hash = self.blockchain.height_to_hash(fork_point_height) + assert prev_b_hash is not None + prev_b = await self.blockchain.get_full_block(prev_b_hash) + assert prev_b is not None + ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None) + vs = ValidationState(ssi, diff, prev_ses_block) async def fetch_block_batches( batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] @@ -1124,17 +1139,7 @@ async def fetch_block_batches( async def validate_block_batches( inner_batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] ) -> None: - fork_info: Optional[ForkInfo] = None - if fork_point_height == 0: - ssi = self.constants.SUB_SLOT_ITERS_STARTING - diff = self.constants.DIFFICULTY_STARTING - prev_ses_block = None - else: - prev_b_hash = self.blockchain.height_to_hash(fork_point_height) - assert prev_b_hash is not None - prev_b = await self.blockchain.get_full_block(prev_b_hash) - assert prev_b is not None - ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None) + nonlocal fork_info block_rate = 0 block_rate_time = time.monotonic() block_rate_height = -1 @@ -1172,13 +1177,13 @@ async def validate_block_batches( assert fork_hash is not None fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) - success, state_change_summary, ssi, diff, prev_ses_block, err = await self.add_block_batch( + # The ValidationState object (vs) is an in-out parameter. 
the add_block_batch() + # call will update it + success, state_change_summary, err = await self.add_block_batch( blocks, peer.get_peer_logging(), fork_info, - ssi, - diff, - prev_ses_block, + vs, summaries, ) if success is False: @@ -1279,11 +1284,9 @@ async def add_block_batch( all_blocks: List[FullBlock], peer_info: PeerInfo, fork_info: Optional[ForkInfo], - current_ssi: uint64, - current_difficulty: uint64, - prev_ses_block: Optional[BlockRecord] = None, + vs: ValidationState, # in-out parameter wp_summaries: Optional[List[SubEpochSummary]] = None, - ) -> Tuple[bool, Optional[StateChangeSummary], uint64, uint64, Optional[BlockRecord], Optional[Err]]: + ) -> Tuple[bool, Optional[StateChangeSummary], Optional[Err]]: # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced @@ -1298,11 +1301,11 @@ async def add_block_batch( self.blockchain.add_block_record(block_rec) if block_rec.sub_epoch_summary_included: # already validated block, update sub slot iters, difficulty and prev sub epoch summary - prev_ses_block = block_rec + vs.prev_ses_block = block_rec if block_rec.sub_epoch_summary_included.new_sub_slot_iters is not None: - current_ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters + vs.current_ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters if block_rec.sub_epoch_summary_included.new_difficulty is not None: - current_difficulty = block_rec.sub_epoch_summary_included.new_difficulty + vs.current_difficulty = block_rec.sub_epoch_summary_included.new_difficulty if fork_info is None: continue @@ -1328,10 +1331,13 @@ async def add_block_batch( await self.blockchain.run_single_block(block, fork_info) if len(blocks_to_validate) == 0: - return True, None, current_ssi, current_difficulty, prev_ses_block, None + return True, None, None # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions # for these blocks (unlike during normal operation where we validate one at a time) + # We have to copy the ValidationState object to preserve it for the add_block() + # call below. pre_validate_blocks_multiprocessing() will update the + # object we pass in. 
pre_validate_start = time.monotonic() pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( self.blockchain.constants, @@ -1339,9 +1345,7 @@ async def add_block_batch( blocks_to_validate, self.blockchain.pool, {}, - sub_slot_iters=current_ssi, - difficulty=current_difficulty, - prev_ses_block=prev_ses_block, + copy.copy(vs), wp_summaries=wp_summaries, validate_signatures=True, ) @@ -1363,15 +1367,13 @@ async def add_block_batch( return ( False, None, - current_ssi, - current_difficulty, - prev_ses_block, Err(pre_validation_results[i].error), ) agg_state_change_summary: Optional[StateChangeSummary] = None block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash) for i, block in enumerate(blocks_to_validate): + assert vs.prev_ses_block is None or vs.prev_ses_block.height < block.height assert pre_validation_results[i].required_iters is not None state_change_summary: Optional[StateChangeSummary] # when adding blocks in batches, we won't have any overlapping @@ -1385,13 +1387,13 @@ async def add_block_batch( self.constants, True, block_record, self.blockchain ) assert cc_sub_slot.new_sub_slot_iters is not None - current_ssi = cc_sub_slot.new_sub_slot_iters + vs.current_ssi = cc_sub_slot.new_sub_slot_iters assert cc_sub_slot.new_difficulty is not None - current_difficulty = cc_sub_slot.new_difficulty - assert expected_sub_slot_iters == current_ssi - assert expected_difficulty == current_difficulty + vs.current_difficulty = cc_sub_slot.new_difficulty + assert expected_sub_slot_iters == vs.current_ssi + assert expected_difficulty == vs.current_difficulty result, error, state_change_summary = await self.blockchain.add_block( - block, pre_validation_results[i], None, current_ssi, fork_info, prev_ses_block=prev_ses_block + block, pre_validation_results[i], None, vs.current_ssi, fork_info, prev_ses_block=vs.prev_ses_block ) if result == AddBlockResult.NEW_PEAK: @@ -1417,11 +1419,11 @@ async def add_block_batch( elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK: if error is not None: self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ") - return False, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, error + return False, agg_state_change_summary, error block_record = self.blockchain.block_record(block.header_hash) assert block_record is not None if block_record.sub_epoch_summary_included is not None: - prev_ses_block = block_record + vs.prev_ses_block = block_record if self.weight_proof_handler is not None: await self.weight_proof_handler.create_prev_sub_epoch_segments() if agg_state_change_summary is not None: @@ -1430,7 +1432,7 @@ async def add_block_batch( f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, " f"advanced: True" ) - return True, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, None + return True, agg_state_change_summary, None async def get_sub_slot_iters_difficulty_ses_block( self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64] @@ -1869,9 +1871,7 @@ async def add_block( [block], self.blockchain.pool, block_height_conds_map, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=prev_ses_block, + ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) added: Optional[AddBlockResult] = None diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index 91b36784ed92..8c6fa57c59fa 
100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -22,6 +22,7 @@ from chia.types.coin_record import CoinRecord from chia.types.full_block import FullBlock from chia.types.spend_bundle import SpendBundle +from chia.types.validation_state import ValidationState from chia.util.config import lock_and_load_config, save_config from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.timing import adjusted_timeout, backoff_times @@ -176,9 +177,7 @@ async def farm_new_transaction_block( [genesis], self.full_node.blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diff, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=True, ) assert pre_validation_results is not None @@ -227,7 +226,7 @@ async def farm_new_transaction_block( async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_timestamp: bool = False): ssi = self.full_node.constants.SUB_SLOT_ITERS_STARTING - diffculty = self.full_node.constants.DIFFICULTY_STARTING + diff = self.full_node.constants.DIFFICULTY_STARTING async with self.full_node.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high): self.log.info("Farming new block!") current_blocks = await self.get_all_full_blocks() @@ -239,9 +238,7 @@ async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_tim [genesis], self.full_node.blockchain.pool, {}, - sub_slot_iters=ssi, - difficulty=diffculty, - prev_ses_block=None, + ValidationState(ssi, diff, None), validate_signatures=True, ) assert pre_validation_results is not None diff --git a/chia/types/validation_state.py b/chia/types/validation_state.py new file mode 100644 index 000000000000..f3382f68d446 --- /dev/null +++ b/chia/types/validation_state.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +import dataclasses +from typing import Optional + +from chia.consensus.block_record import BlockRecord +from chia.util.ints import uint64 + + +@dataclasses.dataclass +class ValidationState: + current_ssi: uint64 + current_difficulty: uint64 + prev_ses_block: Optional[BlockRecord] = None diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py index f944f702667b..20752d8055ed 100755 --- a/tools/test_full_sync.py +++ b/tools/test_full_sync.py @@ -19,6 +19,7 @@ from chia.full_node.full_node import FullNode from chia.server.ws_connection import WSChiaConnection from chia.types.full_block import FullBlock +from chia.types.validation_state import ValidationState from chia.util.config import load_config @@ -158,8 +159,8 @@ async def run_sync_checkpoint( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) - success, _, _, _, _, _ = await full_node.add_block_batch( - block_batch, peer_info, None, current_ssi=ssi, current_difficulty=diff + success, _, err = await full_node.add_block_batch( + block_batch, peer_info, None, ValidationState(ssi, diff, None) ) end_height = block_batch[-1].height full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE) @@ -176,8 +177,8 @@ async def run_sync_checkpoint( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) - success, _, _, _, _, _ = await full_node.add_block_batch( - block_batch, peer_info, None, current_ssi=ssi, current_difficulty=diff + success, _, err = await full_node.add_block_batch( + block_batch, peer_info, None, ValidationState(ssi, diff, None) ) if not success: raise 
RuntimeError("failed to ingest block batch") From 5f6c336e757534d3a36b1a03612e05b412a18c61 Mon Sep 17 00:00:00 2001 From: matt-o-how <48453825+matt-o-how@users.noreply.github.com> Date: Mon, 7 Oct 2024 19:09:41 +0100 Subject: [PATCH 29/69] [CHIA-621] Add a configurable limit to the number of DIDs that can be automatically added to the user's wallet from transfer (#18655) * add cap to amount of DID wallets that can be automatically created * add further testing * add extra tests for recovering discarded DID and making new DIDs above the limit * lint fix * add log warning when ignoring DID * typing fixes for tests * a few more typing fixes * black * Update chia/_tests/wallet/did_wallet/test_did.py Co-authored-by: Arvid Norberg * fix wrong wallet node get_wallet() call * fix rebase * remove unused typeignore --------- Co-authored-by: Arvid Norberg --- chia/_tests/wallet/did_wallet/test_did.py | 194 +++++++++++++++++++++- chia/rpc/wallet_rpc_api.py | 11 +- chia/util/initial-config.yaml | 3 + chia/wallet/did_wallet/did_wallet.py | 6 +- chia/wallet/wallet_state_manager.py | 30 ++-- 5 files changed, 220 insertions(+), 24 deletions(-) diff --git a/chia/_tests/wallet/did_wallet/test_did.py b/chia/_tests/wallet/did_wallet/test_did.py index b023fd6beffa..7a405aab702c 100644 --- a/chia/_tests/wallet/did_wallet/test_did.py +++ b/chia/_tests/wallet/did_wallet/test_did.py @@ -2,6 +2,7 @@ import dataclasses import json +from typing import List, Tuple import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -12,6 +13,9 @@ from chia._tests.util.time_out_assert import time_out_assert from chia.rpc.wallet_request_types import DIDGetCurrentCoinInfo, DIDGetRecoveryInfo from chia.rpc.wallet_rpc_api import WalletRpcApi +from chia.server.server import ChiaServer +from chia.simulator.block_tools import BlockTools +from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.simulator_protocol import FarmNewBlockProtocol from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -26,6 +30,7 @@ from chia.wallet.util.address_type import AddressType from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType +from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_spend_bundle import WalletSpendBundle @@ -46,8 +51,8 @@ class TestDIDWallet: ) @pytest.mark.anyio async def test_creation_from_coin_spend( - self, self_hostname, two_nodes_two_wallets_with_same_keys: OldSimulatorsAndWallets, trusted - ): + self, self_hostname: str, two_nodes_two_wallets_with_same_keys: OldSimulatorsAndWallets, trusted: bool + ) -> None: """ Verify that DIDWallet.create_new_did_wallet_from_coin_spend() is called after Singleton creation on the blockchain, and that the wallet is created in the second wallet node. 
@@ -769,7 +774,7 @@ async def test_did_recovery_with_empty_set(self, wallet_environments: WalletTest ] ) coin = await did_wallet.get_coin() - info = Program.to([]) + info: List[Tuple[bytes, bytes, int]] = [] pubkey = (await did_wallet.wallet_state_manager.get_unused_derivation_record(did_wallet.wallet_info.id)).pubkey with pytest.raises(Exception): # We expect a CLVM 80 error for this test async with did_wallet.wallet_state_manager.new_action_scope( @@ -778,7 +783,7 @@ async def test_did_recovery_with_empty_set(self, wallet_environments: WalletTest await did_wallet.recovery_spend( coin, ph, - info, # type: ignore + info, pubkey, WalletSpendBundle([], AugSchemeMPL.aggregate([])), action_scope, @@ -966,6 +971,8 @@ async def test_did_attest_after_recovery(self, wallet_environments: WalletTestFr ] ) + await time_out_assert(15, did_wallet.get_confirmed_balance, 101) + await time_out_assert(15, did_wallet.get_unconfirmed_balance, 101) recovery_list = [bytes32.from_hexstr(did_wallet.get_my_DID())] async with wallet_1.wallet_state_manager.new_action_scope( @@ -1354,6 +1361,165 @@ async def test_did_transfer(self, wallet_environments: WalletTestFramework, with new_puzhash, ) + @pytest.mark.parametrize( + "trusted", + [True, False], + ) + @pytest.mark.anyio + async def test_did_auto_transfer_limit( + self, + self_hostname: str, + two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + trusted: bool, + ) -> None: + fee = uint64(1000) + full_nodes, wallets, _ = two_wallet_nodes + full_node_api = full_nodes[0] + server_1 = full_node_api.server + wallet_node, server_2 = wallets[0] + wallet_node_2, server_3 = wallets[1] + wallet = wallet_node.wallet_state_manager.main_wallet + wallet2 = wallet_node_2.wallet_state_manager.main_wallet + api_1 = WalletRpcApi(wallet_node_2) + ph = await wallet.get_new_puzzlehash() + + if trusted: + wallet_node.config["trusted_peers"] = { + full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() + } + wallet_node_2.config["trusted_peers"] = { + full_node_api.full_node.server.node_id.hex(): full_node_api.full_node.server.node_id.hex() + } + else: + wallet_node.config["trusted_peers"] = {} + wallet_node_2.config["trusted_peers"] = {} + + await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) + await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), None) + await full_node_api.farm_blocks_to_wallet(1, wallet) + + # Check that we cap out at 10 DID Wallets automatically created upon transfer received + for i in range(0, 14): + async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + did_wallet_1: DIDWallet = await DIDWallet.create_new_did_wallet( + wallet_node.wallet_state_manager, + wallet, + uint64(101), + action_scope, + [bytes32(bytes(ph))], + uint64(1), + {"Twitter": "Test", "GitHub": "测试"}, + fee=fee, + ) + assert did_wallet_1.get_name() == "Profile 1" + await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) + await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + await time_out_assert(15, did_wallet_1.get_confirmed_balance, 101) + await time_out_assert(15, did_wallet_1.get_unconfirmed_balance, 101) + # Transfer DID + assert did_wallet_1.did_info.origin_coin is not None + origin_coin = did_wallet_1.did_info.origin_coin + new_puzhash = await wallet2.get_new_puzzlehash() + async with 
did_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + await did_wallet_1.transfer_did(new_puzhash, fee, False, action_scope) + await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) + await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + # Check if the DID wallet is created in the wallet2 + + await time_out_assert( + 30, get_wallet_num, min(2 + i, 11), wallet_node_2.wallet_state_manager + ) # check we haven't made more than 10 DID wallets + await time_out_assert(30, get_wallet_num, 1, wallet_node.wallet_state_manager) + # Get the new DID wallets + did_wallets = list( + filter( + lambda w: (w.type == WalletType.DECENTRALIZED_ID), + await wallet_node_2.wallet_state_manager.get_all_wallet_info_entries(), + ) + ) + + assert len(did_wallets) == 10 + # Test we can use the DID + did_wallet_10 = wallet_node_2.wallet_state_manager.get_wallet( + id=uint32(did_wallets[9].id), required_type=DIDWallet + ) + # Delete the coin and change inner puzzle + coin = await did_wallet_10.get_coin() + # origin_coin = did_wallet_10.did_info.origin_coin + backup_data = did_wallet_10.create_backup() + await wallet_node_2.wallet_state_manager.coin_store.delete_coin_record(coin.name()) + await time_out_assert(15, did_wallet_10.get_confirmed_balance, 0) + await wallet_node_2.wallet_state_manager.user_store.delete_wallet(did_wallet_10.wallet_info.id) + wallet_node_2.wallet_state_manager.wallets.pop(did_wallet_10.wallet_info.id) + # Recover the coin + async with wallet_node_2.wallet_state_manager.lock: + did_wallet_10 = await DIDWallet.create_new_did_wallet_from_recovery( + wallet_node_2.wallet_state_manager, + wallet2, + backup_data, + ) + assert did_wallet_10.did_info.origin_coin is not None + resp = await api_1.did_find_lost_did({"coin_id": did_wallet_10.did_info.origin_coin.name().hex()}) + assert resp["success"] + await time_out_assert(15, did_wallet_10.get_confirmed_balance, 101) + await time_out_assert(15, did_wallet_10.get_unconfirmed_balance, 101) + + # Check we can recover an auto-discarded DID + did_wallet_9 = wallet_node_2.wallet_state_manager.get_wallet( + id=uint32(did_wallets[8].id), required_type=DIDWallet + ) + # Delete the coin and wallet to make space for a auto-discarded DID + coin = await did_wallet_9.get_coin() + await wallet_node_2.wallet_state_manager.coin_store.delete_coin_record(coin.name()) + await time_out_assert(15, did_wallet_9.get_confirmed_balance, 0) + await wallet_node_2.wallet_state_manager.user_store.delete_wallet(did_wallet_9.wallet_info.id) + wallet_node_2.wallet_state_manager.wallets.pop(did_wallet_9.wallet_info.id) + + did_wallets = list( + filter( + lambda w: (w.type == WalletType.DECENTRALIZED_ID), + await wallet_node_2.wallet_state_manager.get_all_wallet_info_entries(), + ) + ) + assert len(did_wallets) == 9 + + # Try and find lost coin + resp = await api_1.did_find_lost_did({"coin_id": origin_coin.name().hex()}) + did_wallets = list( + filter( + lambda w: (w.type == WalletType.DECENTRALIZED_ID), + await wallet_node_2.wallet_state_manager.get_all_wallet_info_entries(), + ) + ) + assert len(did_wallets) == 10 + + # Check we can still manually add new DIDs while at cap + await full_node_api.farm_blocks_to_wallet(1, wallet2) + ph = await wallet2.get_new_puzzlehash() + async with wallet2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: + did_wallet_11: DIDWallet = await DIDWallet.create_new_did_wallet( + 
wallet_node_2.wallet_state_manager, + wallet2, + uint64(101), + action_scope, + [bytes32(bytes(ph))], + uint64(1), + {"Twitter": "Test", "GitHub": "测试"}, + fee=fee, + ) + await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) + await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node, wallet_node_2]) + await time_out_assert(15, did_wallet_11.get_confirmed_balance, 101) + await time_out_assert(15, did_wallet_11.get_unconfirmed_balance, 101) + + did_wallets = list( + filter( + lambda w: (w.type == WalletType.DECENTRALIZED_ID), + await wallet_node_2.wallet_state_manager.get_all_wallet_info_entries(), + ) + ) + assert len(did_wallets) == 11 + @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") @pytest.mark.parametrize("wallet_environments", [{"num_environments": 1, "blocks_needed": [1]}], indirect=True) @pytest.mark.anyio @@ -1811,6 +1977,7 @@ async def test_did_sign_message(self, wallet_environments: WalletTestFramework): # Test general string assert did_wallet_1.did_info.origin_coin is not None # mypy message = "Hello World" + assert did_wallet_1.did_info.origin_coin is not None response = await api_0.sign_message_by_id( { "id": encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value), @@ -1842,6 +2009,7 @@ async def test_did_sign_message(self, wallet_environments: WalletTestFramework): # Test BLS sign string message = "Hello World" + assert did_wallet_1.did_info.origin_coin is not None response = await api_0.sign_message_by_id( { "id": encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value), @@ -1858,6 +2026,7 @@ async def test_did_sign_message(self, wallet_environments: WalletTestFramework): ) # Test BLS sign hex message = "0123456789ABCDEF" + assert did_wallet_1.did_info.origin_coin is not None response = await api_0.sign_message_by_id( { "id": encode_puzzle_hash(did_wallet_1.did_info.origin_coin.name(), AddressType.DID.value), @@ -1878,7 +2047,9 @@ async def test_did_sign_message(self, wallet_environments: WalletTestFramework): [True, False], ) @pytest.mark.anyio - async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_wallets_with_same_keys, trusted): + async def test_create_did_with_recovery_list( + self, self_hostname: str, two_nodes_two_wallets_with_same_keys: OldSimulatorsAndWallets, trusted: bool + ) -> None: """ A DID is created on-chain in client0, causing a DID Wallet to be created in client1, which shares the same key. This can happen if someone uses the same key on multiple computers, or is syncing a wallet from scratch. 
@@ -1922,7 +2093,7 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_ # Node 0 sets up a DID Wallet with a backup set, but num_of_backup_ids_needed=0 # (a malformed solution, but legal for the clvm puzzle) - recovery_list = [bytes.fromhex("00" * 32)] + recovery_list = [bytes32(bytes.fromhex("00" * 32))] async with wallet_0.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: did_wallet_0: DIDWallet = await DIDWallet.create_new_did_wallet( wallet_node_0.wallet_state_manager, @@ -1930,7 +2101,7 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_ uint64(101), action_scope, backups_ids=recovery_list, - num_of_backup_ids_needed=0, + num_of_backup_ids_needed=uint64(0), ) await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) @@ -1962,7 +2133,12 @@ async def test_create_did_with_recovery_list(self, self_hostname, two_nodes_two_ [True, False], ) @pytest.mark.anyio - async def test_did_resync(self, self_hostname, two_wallet_nodes, trusted) -> None: + async def test_did_resync( + self, + self_hostname: str, + two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + trusted: bool, + ) -> None: full_nodes, wallets, _ = two_wallet_nodes full_node_api = full_nodes[0] full_node_server = full_node_api.full_node.server @@ -1980,6 +2156,7 @@ async def test_did_resync(self, self_hostname, two_wallet_nodes, trusted) -> Non else: wallet_node_1.config["trusted_peers"] = {} wallet_node_2.config["trusted_peers"] = {} + assert full_node_server._port is not None await wallet_server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None) await wallet_server_2.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None) await full_node_api.farm_blocks_to_wallet(1, wallet) @@ -2029,6 +2206,7 @@ async def test_did_resync(self, self_hostname, two_wallet_nodes, trusted) -> Non # Start resync await wallet_node_1._start_with_fingerprint(fingerprint_1) await wallet_node_2._start_with_fingerprint(fingerprint_2) + assert full_node_server._port is not None await wallet_server_1.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None) await wallet_server_2.start_client(PeerInfo(self_hostname, uint16(full_node_server._port)), None) await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32(b"\00" * 32))) diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 8328425b11f3..542c2800a4b1 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -2553,17 +2553,18 @@ async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult: if hinted_coin.coin.amount % 2 == 1 and hinted_coin.hint is not None: hint = hinted_coin.hint break - if hint is None: + derivation_record = None + if hint is not None: + derivation_record = ( + await self.service.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(hint) + ) + if derivation_record is None: # This is an invalid DID, check if we are owner derivation_record = ( await self.service.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash( p2_puzzle.get_tree_hash() ) ) - else: - derivation_record = ( - await self.service.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(hint) - ) launcher_id = bytes32(singleton_struct.rest().first().as_atom()) if derivation_record is None: diff --git a/chia/util/initial-config.yaml 
b/chia/util/initial-config.yaml index e3d419500716..f913ae4c9b54 100644 --- a/chia/util/initial-config.yaml +++ b/chia/util/initial-config.yaml @@ -564,6 +564,9 @@ wallet: # the user accepts the risk/responsibility of verifying the authenticity and origin of unknown CATs automatically_add_unknown_cats: False + # if an unknown DID is sent to us, a wallet will be automatically created + did_auto_add_limit: 10 + # Interval to resend unconfirmed transactions, even if previously accepted into Mempool tx_resend_timeout_secs: 1800 diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py index 1f2c63564f59..28cb5875199e 100644 --- a/chia/wallet/did_wallet/did_wallet.py +++ b/chia/wallet/did_wallet/did_wallet.py @@ -1445,7 +1445,11 @@ def deserialize_backup_data(backup_data: str) -> DIDInfo: if num_of_backup_ids_needed > len(backup_ids): raise Exception innerpuz: Program = Program.from_bytes(bytes.fromhex(details[4])) - metadata: str = details[6] + metadata: str = "" + for d in details[6:]: + metadata = metadata + d + ":" + if len(metadata) > 0: + metadata = metadata[:-1] did_info = DIDInfo( origin_coin=origin, backup_ids=backup_ids, diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index 83dd0a06820c..8d87a46d4ab1 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -1321,27 +1321,37 @@ async def handle_did( launch_coin: CoinState = response[0] origin_coin = launch_coin.coin + did_wallet_count = 0 for wallet in self.wallets.values(): if wallet.type() == WalletType.DECENTRALIZED_ID: assert isinstance(wallet, DIDWallet) assert wallet.did_info.origin_coin is not None if origin_coin.name() == wallet.did_info.origin_coin.name(): return WalletIdentifier.create(wallet) + did_wallet_count += 1 if coin_state.spent_height is not None: # The first coin we received for DID wallet is spent. # This means the wallet is in a resync process, skip the coin return None - did_wallet = await DIDWallet.create_new_did_wallet_from_coin_spend( - self, - self.main_wallet, - launch_coin.coin, - did_puzzle, - coin_spend, - f"DID {encode_puzzle_hash(launch_id, AddressType.DID.hrp(self.config))}", + # check we aren't above the auto-add wallet limit + limit = self.config.get("did_auto_add_limit", 10) + if did_wallet_count < limit: + did_wallet = await DIDWallet.create_new_did_wallet_from_coin_spend( + self, + self.main_wallet, + launch_coin.coin, + did_puzzle, + coin_spend, + f"DID {encode_puzzle_hash(launch_id, AddressType.DID.hrp(self.config))}", + ) + wallet_identifier = WalletIdentifier.create(did_wallet) + self.state_changed("wallet_created", wallet_identifier.id, {"did_id": did_wallet.get_my_DID()}) + return wallet_identifier + # we are over the limit + self.log.warning( + f"You are at the max configured limit of {limit} DIDs. 
Ignoring received DID {launch_id.hex()}" ) - wallet_identifier = WalletIdentifier.create(did_wallet) - self.state_changed("wallet_created", wallet_identifier.id, {"did_id": did_wallet.get_my_DID()}) - return wallet_identifier + return None async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> Optional[bytes32]: # Get minter DID From 08a9a4d2b7f833730a59c8b970f9bcd16b846bec Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Tue, 8 Oct 2024 12:11:14 -0700 Subject: [PATCH 30/69] [CHIA-1569] Fix fee behavior with `chia wallet coins combine` (#18679) * Fix coin fees wrt coin combining * Add override behavior --- chia/_tests/cmds/wallet/test_coins.py | 18 +++-- chia/_tests/wallet/rpc/test_wallet_rpc.py | 82 +++++++++++++++++++++++ chia/cmds/coin_funcs.py | 9 +++ chia/cmds/coins.py | 3 + chia/rpc/wallet_rpc_api.py | 27 +++++--- 5 files changed, 123 insertions(+), 16 deletions(-) diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 751140808510..2c5b84ee830c 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -67,6 +67,7 @@ async def combine_coins( inst_rpc_client = CoinsCombineRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client + assert sum(coin.amount for coin in STD_TX.removals) < 500_000_000_000 command_args = [ "wallet", "coins", @@ -74,7 +75,7 @@ async def combine_coins( FINGERPRINT_ARG, "-i1", "--largest-first", - "-m0.001", + "-m0.5", "--min-amount", "0.1", "--max-amount", @@ -91,11 +92,13 @@ async def combine_coins( "150", ] # these are various things that should be in the output + assert_list = ["Fee is >= the amount of coins selected. To continue, please use --override flag."] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) assert_list = [ "Transactions would combine up to 500 coins", f"To get status, use command: chia wallet get_transaction -f {FINGERPRINT} -tx 0x{STD_TX.name.hex()}", ] - run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + run_cli_command_and_assert(capsys, root_dir, command_args + ["--override"], assert_list) expected_tx_config = TXConfig( min_coin_amount=uint64(100_000_000_000), max_coin_amount=uint64(200_000_000_000), @@ -109,13 +112,18 @@ async def combine_coins( largest_first=True, target_coin_ids=[bytes32([0] * 32)], target_coin_amount=uint64(1_000_000_000_000), - fee=uint64(1_000_000_000), + fee=uint64(500_000_000_000), push=False, ) expected_calls: logType = { - "get_wallets": [(None,)], - "get_synced": [()], + "get_wallets": [(None,)] * 2, + "get_synced": [()] * 2, "combine_coins": [ + ( + expected_request, + expected_tx_config, + test_condition_valid_times, + ), ( expected_request, expected_tx_config, diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index 98b829a93008..49778d83013b 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -3022,3 +3022,85 @@ async def test_combine_coins(wallet_environments: WalletTestFramework) -> None: ) ] ) + + +@pytest.mark.parametrize( + "wallet_environments", + [ + { + "num_environments": 1, + "blocks_needed": [2], + "trusted": True, # irrelevant + "reuse_puzhash": True, # irrelevant + } + ], + indirect=True, +) +@pytest.mark.limit_consensus_modes(reason="irrelevant") +@pytest.mark.anyio +async def test_fee_bigger_than_selection_coin_combining(wallet_environments: WalletTestFramework) -> None: + """ + This tests 
the case where the coins we would otherwise select are not enough to pay the fee. + """ + + env = wallet_environments.environments[0] + env.wallet_aliases = { + "xch": 1, + "cat": 2, + } + + # Should have 4 coins, two 1.75 XCH, two 0.25 XCH + + # Grab one of the 0.25 ones to specify + async with env.wallet_state_manager.new_action_scope(wallet_environments.tx_config) as action_scope: + target_coin = list(await env.xch_wallet.select_coins(uint64(250_000_000_000), action_scope))[0] + assert target_coin.amount == 250_000_000_000 + + fee = uint64(1_750_000_000_000) + # Under standard circumstances we would select the small coins, but this is not enough to pay the fee + # Instead, we will grab the big coin first and combine it with one of the smaller coins + xch_combine_request = CombineCoins( + wallet_id=uint32(1), + number_of_coins=uint16(2), + fee=fee, + largest_first=False, + push=True, + ) + + # First test an error where fee selection causes too many coins to be selected + with pytest.raises(ResponseFailureError, match="without selecting more coins than specified: 3"): + await env.rpc_client.combine_coins( + dataclasses.replace(xch_combine_request, fee=uint64(2_250_000_000_000)), + wallet_environments.tx_config, + ) + + await env.rpc_client.combine_coins( + xch_combine_request, + wallet_environments.tx_config, + ) + + await wallet_environments.process_pending_states( + [ + WalletStateTransition( + pre_block_balance_updates={ + "xch": { + "unconfirmed_wallet_balance": -fee, + "spendable_balance": -2_000_000_000_000, + "pending_change": 250_000_000_000, + "max_send_amount": -2_000_000_000_000, + "pending_coin_removal_count": 2, + } + }, + post_block_balance_updates={ + "xch": { + "confirmed_wallet_balance": -fee, + "spendable_balance": 250_000_000_000, + "pending_change": -250_000_000_000, + "max_send_amount": 250_000_000_000, + "pending_coin_removal_count": -2, + "unspent_coin_count": -1, # combine 2 into 1 + } + }, + ) + ] + ) diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index 35b321326a23..21b3d5859f91 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -128,6 +128,7 @@ async def async_combine( largest_first: bool, push: bool, condition_valid_times: ConditionValidTimes, + override: bool, ) -> List[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): try: @@ -167,6 +168,14 @@ async def async_combine( timelock_info=condition_valid_times, ) + if ( + not override + and wallet_id == 1 + and fee >= sum(coin.amount for tx in resp.transactions for coin in tx.removals) + ): + print("Fee is >= the amount of coins selected. To continue, please use --override flag.") + return [] + print(f"Transactions would combine up to {number_of_coins} coins.") if push: cli_confirm("Would you like to Continue? 
(y/n): ") diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index 5593f5d47b8a..a811985ba29a 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -108,6 +108,7 @@ def list_cmd( default=False, help="Sort coins from largest to smallest or smallest to largest.", ) +@click.option("--override", help="Submits transaction without checking for unusual values", is_flag=True, default=False) @tx_out_cmd() def combine_cmd( wallet_rpc_port: Optional[int], @@ -125,6 +126,7 @@ def combine_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, + override: bool, ) -> List[TransactionRecord]: from .coin_funcs import async_combine @@ -145,6 +147,7 @@ def combine_cmd( largest_first=largest_first, push=push, condition_valid_times=condition_valid_times, + override=override, ) ) diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 7bb2f2749481..5d428c4eb48a 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -1204,18 +1204,22 @@ async def combine_coins( async with action_scope.use() as interface: interface.side_effects.selected_coins.extend(coins) - # Next let's select enough coins to meet the target if there is one - if request.target_coin_amount is not None: - fungible_amount_needed = request.target_coin_amount - if isinstance(wallet, Wallet): - fungible_amount_needed = uint64(request.target_coin_amount + request.fee) - amount_selected = sum(c.amount for c in coins) - if amount_selected < fungible_amount_needed: - coins.extend( - await wallet.select_coins( - amount=uint64(fungible_amount_needed - amount_selected), action_scope=action_scope - ) + # Next let's select enough coins to meet the target + fee if there is one + fungible_amount_needed = uint64(0) if request.target_coin_amount is None else request.target_coin_amount + if isinstance(wallet, Wallet): + fungible_amount_needed = uint64(fungible_amount_needed + request.fee) + amount_selected = sum(c.amount for c in coins) + if amount_selected < fungible_amount_needed: # implicit fungible_amount_needed > 0 here + coins.extend( + await wallet.select_coins( + amount=uint64(fungible_amount_needed - amount_selected), action_scope=action_scope ) + ) + + if len(coins) > request.number_of_coins: + raise ValueError( + f"Options specified cannot be met without selecting more coins than specified: {len(coins)}" + ) # Now let's select enough coins to get to the target number to combine if len(coins) < request.number_of_coins: @@ -1243,6 +1247,7 @@ async def combine_coins( uint64(sum(c.amount for c in coins)) if request.target_coin_amount is None else request.target_coin_amount ) if isinstance(wallet, Wallet): + primary_output_amount = uint64(primary_output_amount - request.fee) await wallet.generate_signed_transaction( primary_output_amount, await wallet.get_puzzle_hash(new=action_scope.config.tx_config.reuse_puzhash), From 8e39d5e5d7fcd637a02de82a8f426fe5a9c5127e Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Tue, 8 Oct 2024 22:17:51 +0100 Subject: [PATCH 31/69] CHIA-1602 Remove no longer needed TestNetwork class (#18682) Remove no longer needed TestNetwork class. 
--- chia/_tests/util/test_network.py | 52 +++++++++++++++----------------- 1 file changed, 25 insertions(+), 27 deletions(-) diff --git a/chia/_tests/util/test_network.py b/chia/_tests/util/test_network.py index 17998a86a097..69aebe6cb15e 100644 --- a/chia/_tests/util/test_network.py +++ b/chia/_tests/util/test_network.py @@ -10,35 +10,33 @@ from chia.util.network import IPAddress, resolve -class TestNetwork: - @pytest.mark.anyio - async def test_resolve4(self): - # Run these tests forcing IPv4 resolution - prefer_ipv6 = False - assert await resolve("127.0.0.1", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1") - assert await resolve("10.11.12.13", prefer_ipv6=prefer_ipv6) == IPAddress.create("10.11.12.13") - assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1") - assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create("93.184.215.14") +@pytest.mark.anyio +async def test_resolve4(): + # Run these tests forcing IPv4 resolution + prefer_ipv6 = False + assert await resolve("127.0.0.1", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1") + assert await resolve("10.11.12.13", prefer_ipv6=prefer_ipv6) == IPAddress.create("10.11.12.13") + assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("127.0.0.1") + assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create("93.184.215.14") - @pytest.mark.anyio - @pytest.mark.skipif( - condition=("GITHUB_ACTIONS" in os.environ) and (sys.platform in {"darwin", "win32"}), - reason="macOS and Windows runners in GitHub Actions do not seem to support IPv6", + +@pytest.mark.anyio +@pytest.mark.skipif( + condition=("GITHUB_ACTIONS" in os.environ) and (sys.platform in {"darwin", "win32"}), + reason="macOS and Windows runners in GitHub Actions do not seem to support IPv6", +) +async def test_resolve6(): + # Run these tests forcing IPv6 resolution + prefer_ipv6 = True + assert await resolve("::1", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1") + assert await resolve("2000:1000::1234:abcd", prefer_ipv6=prefer_ipv6) == IPAddress.create("2000:1000::1234:abcd") + # ip6-localhost is not always available, and localhost is IPv4 only + # on some systems. Just test neither here. + # assert await resolve("ip6-localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1") + # assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1") + assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create( + "2606:2800:21f:cb07:6820:80da:af6b:8b2c" ) - async def test_resolve6(self): - # Run these tests forcing IPv6 resolution - prefer_ipv6 = True - assert await resolve("::1", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1") - assert await resolve("2000:1000::1234:abcd", prefer_ipv6=prefer_ipv6) == IPAddress.create( - "2000:1000::1234:abcd" - ) - # ip6-localhost is not always available, and localhost is IPv4 only - # on some systems. Just test neither here. 
-        # assert await resolve("ip6-localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
-        # assert await resolve("localhost", prefer_ipv6=prefer_ipv6) == IPAddress.create("::1")
-        assert await resolve("example.net", prefer_ipv6=prefer_ipv6) == IPAddress.create(
-            "2606:2800:21f:cb07:6820:80da:af6b:8b2c"
-        )
 
 
 @pytest.mark.parametrize(

From 221c6e67715839dc0fcfe4cf5d0de4aae59c8cd7 Mon Sep 17 00:00:00 2001
From: Amine Khaldi
Date: Tue, 8 Oct 2024 22:18:02 +0100
Subject: [PATCH 32/69] CHIA-1593 Annotate test_cached_bls.py (#18678)

Annotate test_cached_bls.py.
---
 chia/_tests/core/util/test_cached_bls.py | 6 +++---
 mypy-exclusions.txt                      | 1 -
 2 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/chia/_tests/core/util/test_cached_bls.py b/chia/_tests/core/util/test_cached_bls.py
index d9553349c1a4..ecacbcac9894 100644
--- a/chia/_tests/core/util/test_cached_bls.py
+++ b/chia/_tests/core/util/test_cached_bls.py
@@ -4,10 +4,10 @@
 from chia.util.hash import std_hash
 
-LOCAL_CACHE = BLSCache(50000)
+LOCAL_CACHE = BLSCache(50_000)
 
 
-def test_cached_bls():
+def test_cached_bls() -> None:
     n_keys = 10
     seed = b"a" * 31
     sks = [AugSchemeMPL.key_gen(seed + bytes([i])) for i in range(n_keys)]
@@ -42,7 +42,7 @@
     assert local_cache.aggregate_verify(pks, msgs, agg_sig)
 
 
-def test_cached_bls_repeat_pk():
+def test_cached_bls_repeat_pk() -> None:
     n_keys = 400
     seed = b"a" * 32
     sks = [AugSchemeMPL.key_gen(seed) for i in range(n_keys)] + [AugSchemeMPL.key_gen(std_hash(seed))]
diff --git a/mypy-exclusions.txt b/mypy-exclusions.txt
index 0c0cc8136bf9..eddedd786fe2 100644
--- a/mypy-exclusions.txt
+++ b/mypy-exclusions.txt
@@ -70,7 +70,6 @@ chia._tests.core.test_daemon_rpc
 chia._tests.core.test_db_conversion
 chia._tests.core.test_filter
 chia._tests.core.test_full_node_rpc
-chia._tests.core.util.test_cached_bls
 chia._tests.core.util.test_config
 chia._tests.core.util.test_file_keyring_synchronization
 chia._tests.core.util.test_files

From 5f01fccdc6cee68b8c49fdb4c30f0aa7eb7c3e3d Mon Sep 17 00:00:00 2001
From: Arvid Norberg
Date: Thu, 10 Oct 2024 22:56:33 +0200
Subject: [PATCH 33/69] [CHIA-598] virtual project structure (#18616)

A slightly modified subset of Quexington's virtual project structure. All
files are treated by default as belonging to the chia-blockchain project.
No default annotations; no default exclusions.
---
 .pre-commit-config.yaml                   |   7 +
 chia/_tests/tools/test_virtual_project.py | 591 ++++++++++++++++++++++
 chia/util/virtual_project_analysis.py     | 532 +++++++++++++++++++
 virtual_project.yaml                      |   5 +
 4 files changed, 1135 insertions(+)
 create mode 100644 chia/_tests/tools/test_virtual_project.py
 create mode 100644 chia/util/virtual_project_analysis.py
 create mode 100644 virtual_project.yaml

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index efceeee634f3..7e7afd4e9dc8 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -71,6 +71,13 @@ repos:
         entry: ./activated.py python tools/chialispp.py .
language: system pass_filenames: false + - repo: local + hooks: + - id: virtual_project_dependency_cycles + name: Check for dependency cycles in project packages + entry: ./activated.py python chia/util/virtual_project_analysis.py print_cycles --directory chia --config virtual_project.yaml + language: system + pass_filenames: false - repo: local hooks: - id: build mypy.ini diff --git a/chia/_tests/tools/test_virtual_project.py b/chia/_tests/tools/test_virtual_project.py new file mode 100644 index 000000000000..1b4bc5d37d25 --- /dev/null +++ b/chia/_tests/tools/test_virtual_project.py @@ -0,0 +1,591 @@ +from __future__ import annotations + +import textwrap +from pathlib import Path +from typing import Any, Callable, Dict, List + +import click +import pytest +import yaml +from click.testing import CliRunner + +from chia.util.virtual_project_analysis import ( + Annotation, + ChiaFile, + Config, + DirectoryParameters, + File, + Package, + build_dependency_graph, + build_virtual_dependency_graph, + cli, + config, + find_cycles, + parse_file_or_package, +) + + +@pytest.mark.parametrize( + "file_string, expected, annotated", + [ + ("# Package: example\n# Some other comment", "example", True), + ("# Some comment\n# Some other comment", "chia-blockchain", False), + ], +) +def test_parse_annotation(file_string: str, expected: str, annotated: bool) -> None: + """ + Test that parse returns an Annotation instance for a valid annotation or + raises ValueError for an invalid one. + """ + annotation = Annotation.parse(file_string) + assert isinstance(annotation, Annotation) + assert annotation.package == expected + assert annotation.is_annotated == annotated + + +# Temporary directory fixture to create test files +@pytest.fixture +def create_test_file(tmp_path: Path) -> Callable[[str, str], Path]: + def _create_test_file(name: str, content: str) -> Path: + file_path = tmp_path / name + file_path.write_text(content, encoding="utf-8") + return file_path + + return _create_test_file + + +def test_parse_with_annotation(create_test_file: Callable[[str, str], Path]) -> None: + """Test parsing a file that contains a valid annotation.""" + file_content = "# Package: test_package\n# Some other comment" + test_file = create_test_file("annotated_file.txt", file_content) + + parsed_file = ChiaFile.parse(test_file) + + assert parsed_file.path == test_file + assert isinstance(parsed_file.annotations, Annotation) + assert parsed_file.annotations.package == "test_package" + + +def test_parse_without_annotation(create_test_file: Callable[[str, str], Path]) -> None: + """Test parsing a file that does not contain any annotations.""" + file_content = "# Some comment\n# Some other comment" + test_file = create_test_file("non_annotated_file.txt", file_content) + + parsed_file = ChiaFile.parse(test_file) + + assert parsed_file.path == test_file + assert not parsed_file.annotations.is_annotated + + +# This test is optional and can be adapted based on expected behavior for non-existent files +def test_parse_nonexistent_file() -> None: + """Test attempting to parse a non-existent file.""" + with pytest.raises(FileNotFoundError): + ChiaFile.parse(Path("/path/to/nonexistent/file.txt")) + + +# Helper function to create a non-empty Python file +def create_python_file(dir_path: Path, name: str, content: str) -> Path: + file_path = dir_path / name + file_path.write_text(content, encoding="utf-8") + return file_path + + +# Helper function to create an empty Python file +def create_empty_python_file(dir_path: Path, name: str) -> Path: + 
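+    # Zero-byte file: DirectoryParameters.gather_non_empty_python_files should skip it.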
file_path = dir_path / name + file_path.touch() + return file_path + + +def test_gather_non_empty_python_files(tmp_path: Path) -> None: + # Set up directory structure + dir_path = tmp_path / "test_dir" + dir_path.mkdir() + excluded_dir = tmp_path / "excluded_dir" + excluded_dir.mkdir() + + # Create test files + non_empty_file = create_python_file(dir_path, "non_empty.py", "print('Hello World')") + create_empty_python_file(dir_path, "empty.py") + create_python_file(excluded_dir, "excluded.py", "print('Hello World')") + + # Initialize DirectoryParameters with excluded paths + dir_params = DirectoryParameters(dir_path=dir_path, excluded_paths=[excluded_dir]) + + # Perform the test + python_files = dir_params.gather_non_empty_python_files() + + # Assertions + assert len(python_files) == 1 # Only one non-empty Python file should be found + assert python_files[0].path == non_empty_file # The path of the gathered file should match the non-empty file + + +def test_gather_with_nested_directories_and_exclusions(tmp_path: Path) -> None: + # Set up directory structure + base_dir = tmp_path / "base_dir" + base_dir.mkdir() + nested_dir = base_dir / "nested_dir" + nested_dir.mkdir() + excluded_dir = base_dir / "excluded_dir" + excluded_dir.mkdir() + + # Create test files + nested_file = create_python_file(nested_dir, "nested.py", "print('Hello World')") + create_empty_python_file(nested_dir, "nested_empty.py") + create_python_file(excluded_dir, "excluded.py", "print('Hello World')") + + # Initialize DirectoryParameters without excluded paths + dir_params = DirectoryParameters(dir_path=base_dir, excluded_paths=[excluded_dir]) + + # Perform the test + python_files = dir_params.gather_non_empty_python_files() + + # Assertions + assert len(python_files) == 1 # Only the non-empty Python file in the nested directory should be found + assert python_files[0].path == nested_file # The path of the gathered file should match the nested non-empty file + + +def test_find_missing_annotations(tmp_path: Path) -> None: + # Set up directory structure + dir_path = tmp_path / "test_dir" + dir_path.mkdir() + + # Create test files + create_python_file(dir_path, "non_empty.py", "print('Hello World')") + + # Run the command + runner = CliRunner() + result = runner.invoke(cli, ["find_missing_annotations", "--directory", str(dir_path)]) + assert result.output == f"{dir_path / 'non_empty.py'}\n" + + # Rewrite file to have annotation + create_python_file(dir_path, "non_empty.py", "# Package: misc\n") + + # Run the command again with no results + result = runner.invoke(cli, ["find_missing_annotations", "--directory", str(dir_path)]) + assert result.output == "" + + +def test_parse_file_or_package() -> None: + assert parse_file_or_package("example.py") == File(Path("example.py")) + assert parse_file_or_package("example.py (extra info)") == File(Path("example.py")) + assert parse_file_or_package("(package_name)") == Package("package_name") + assert parse_file_or_package("package_name") == Package("package_name") + assert parse_file_or_package("package_name(") == Package("package_name(") + assert parse_file_or_package("(package_name") == Package("(package_name") + assert parse_file_or_package("package_name)") == Package("package_name)") + + +@pytest.fixture +def chia_package_structure(tmp_path: Path) -> Path: + base_dir = tmp_path / "chia_project" + base_dir.mkdir() + chia_dir = base_dir / "chia" + chia_dir.mkdir() + + # Create some files within the chia package + create_python_file(chia_dir, "module1.py", "def func1(): pass") + 
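+    # module2 imports module1, and module3 imports module2, forming a small linear dependency chain.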
create_python_file(chia_dir, "module2.py", "def func2(): pass\nfrom chia.module1 import func1") + create_python_file(chia_dir, "module3.py", "def func3(): pass\nimport chia.module2") + + return chia_dir + + +def test_build_dependency_graph(chia_package_structure: Path) -> None: + chia_dir = chia_package_structure + dir_params = DirectoryParameters(dir_path=chia_dir) + graph = build_dependency_graph(dir_params) + assert chia_dir / "module1.py" in graph + assert chia_dir / "module2.py" in graph + assert chia_dir / "module3.py" in graph + assert chia_dir / "module1.py" in graph[chia_dir / "module2.py"] + assert chia_dir / "module2.py" in graph[chia_dir / "module3.py"] + + +def test_print_dependency_graph(chia_package_structure: Path) -> None: + # Run the command + runner = CliRunner() + result = runner.invoke(cli, ["print_dependency_graph", "--directory", str(chia_package_structure)]) + assert "module1.py" in result.output + assert "module2.py" in result.output + assert "module3.py" in result.output + + +# Mock the build_dependency_graph function to control its output +def mock_build_dependency_graph(dir_params: DirectoryParameters) -> Dict[Path, List[Path]]: + return { + Path("/path/to/package1/module1.py"): [ + Path("/path/to/package2/module2.py"), + Path("/path/to/package3/module3.py"), + ], + Path("/path/to/package2/module2.py"): [], + Path("/path/to/package3/module3.py"): [Path("/path/to/package2/module2.py")], + } + + +# Helper function to simulate ChiaFile.parse for testing +def mock_chia_file_parse(path: Path) -> ChiaFile: + annotations_map = { + Path("/path/to/package1/module1.py"): Annotation("Package1", True), + Path("/path/to/package2/module2.py"): Annotation("Package2", True), + Path("/path/to/package3/module3.py"): Annotation("Package3", True), + } + return ChiaFile(path=Path(path), annotations=annotations_map.get(path, Annotation("chia-blockchain", False))) + + +@pytest.fixture +def prepare_mocks(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("chia.util.virtual_project_analysis.build_dependency_graph", mock_build_dependency_graph) + monkeypatch.setattr("chia.util.virtual_project_analysis.ChiaFile.parse", mock_chia_file_parse) + + +def test_build_virtual_dependency_graph(prepare_mocks: None) -> None: + dir_params = DirectoryParameters(dir_path=Path("/path/to/package1")) + virtual_graph = build_virtual_dependency_graph(dir_params) + + assert "Package2" in virtual_graph["Package1"] + assert "Package3" in virtual_graph["Package1"] + assert virtual_graph["Package2"] == [] + assert "Package2" in virtual_graph["Package3"] + + +def test_print_virtual_dependency_graph(tmp_path: Path) -> None: + chia_dir = tmp_path / "chia" + chia_dir.mkdir() + + # Create some files within the chia package + create_python_file(chia_dir, "module1.py", "# Package: one\ndef func1(): pass") + create_python_file(chia_dir, "module2.py", "# Package: two\ndef func2(): pass\nfrom chia.module1 import func1") + create_python_file(chia_dir, "module3.py", "# Package: three\ndef func3(): pass\nimport chia.module2") + + # Run the command + runner = CliRunner() + result = runner.invoke(cli, ["print_virtual_dependency_graph", "--directory", str(chia_dir)]) + assert "one" in result.output + assert "two" in result.output + assert "three" in result.output + + +# Helper function to simulate ChiaFile.parse for testing +def mock_chia_file_parse2(path: Path) -> ChiaFile: + annotations_map = { + Path("/path/to/package1/module1.py"): Annotation("Package1", True), + Path("/path/to/package2/module2.py"): 
Annotation("Package2", True), + Path("/path/to/package3/module3.py"): Annotation("Package1", True), + } + return ChiaFile(path=Path(path), annotations=annotations_map.get(path, Annotation("chia-blockchain", False))) + + +@pytest.fixture +def prepare_mocks2(monkeypatch: pytest.MonkeyPatch) -> None: + monkeypatch.setattr("chia.util.virtual_project_analysis.ChiaFile.parse", mock_chia_file_parse2) + + +def test_cycle_detection(prepare_mocks2: None) -> None: + # Example graph with a simple cycle + graph: Dict[Path, List[Path]] = { + Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], + Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], # Cycle here + Path("/path/to/package3/module3.py"): [], + } + cycles = find_cycles( + graph, + build_virtual_dependency_graph(DirectoryParameters(dir_path=Path("path")), existing_graph=graph), + excluded_paths=[], + ignore_cycles_in=[], + ignore_specific_files=[], + ignore_specific_edges=[], + ) + # \path\to\package1\module1.py (Package1) -> \path\to\package2\module2.py (Package2) -> (Package1) + # \path\to\package2\module2.py (Package2) -> \path\to\package3\module3.py (Package1) -> (Package2) + assert len(cycles) == 2 + + +def test_print_cycles(tmp_path: Path) -> None: + chia_dir = tmp_path / "chia" + chia_dir.mkdir() + + # Create some files within the chia package + create_python_file(chia_dir, "module1.py", "# Package: one\ndef func1(): pass\nfrom chia.module2 import func2") + create_python_file(chia_dir, "module2.py", "# Package: two\ndef func2(): pass\nfrom chia.module3 import func3") + create_python_file(chia_dir, "module3.py", "# Package: one\ndef func3(): pass\n") + + # Run the command + runner = CliRunner() + result = runner.invoke(cli, ["print_cycles", "--directory", str(chia_dir)]) + assert "module1.py (one) -> " in result.output + + +def test_check_config(tmp_path: Path) -> None: + chia_dir = tmp_path / "chia" + chia_dir.mkdir() + + # Create some files within the chia package + create_python_file( + chia_dir, + "module1.py", + textwrap.dedent( + """ + # Package: one + def func1(): pass + from chia.module2 import func2 + """ + ), + ) + create_python_file( + chia_dir, + "module1b.py", + textwrap.dedent( + """ + # Package: one + def func1b(): pass + """ + ), + ) + create_python_file( + chia_dir, + "module2.py", + textwrap.dedent( + """ + # Package: two + def func2(): pass + from chia.module3 import func3 + from chia.module1b import func1b + """ + ), + ) + create_python_file( + chia_dir, + "module3.py", + textwrap.dedent( + """ + # Package: three + def func3(): pass + """ + ), + ) + + # Run the command + runner = CliRunner() + result = runner.invoke( + cli, + [ + "check_config", + "--directory", + str(chia_dir), + "--ignore-cycles-in", + "three", + "--ignore-specific-file", + str(chia_dir / "module3.py"), + "--ignore-specific-edge", + str(chia_dir / "module2.py") + " -> " + str(chia_dir / "module3.py"), + "--ignore-specific-edge", + str(chia_dir / "module2.py") + " -> " + str(chia_dir / "module1b.py"), + ], + ) + assert " module three ignored but no cycles were found" in result.output + assert f" file {str(chia_dir / 'module3.py')} ignored but no cycles were found" in result.output + assert ( + f"edge {str(chia_dir / 'module2.py') + ' -> ' + str(chia_dir / 'module3.py')} ignored but no cycles were found" + in result.output + ) + assert ( + f"edge {str(chia_dir / 'module2.py') + ' -> ' + str(chia_dir / 'module1b.py')} ignored but no cycles were found" + not in result.output + ) + + +def 
test_excluded_paths_handling(prepare_mocks2: None) -> None: + # Graph where module2.py is excluded + graph = { + Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], + Path("/path/to/package2/module2.py"): [Path("/path/to/package1/module1.py")], + } + cycles = find_cycles( + graph, + build_virtual_dependency_graph(DirectoryParameters(dir_path=Path("path")), existing_graph=graph), + excluded_paths=[Path("/path/to/package2/module2.py")], + ignore_cycles_in=[], + ignore_specific_files=[], + ignore_specific_edges=[], + ) + assert len(cycles) == 0 # No cycles due to exclusion + + +def test_ignore_cycles_in_specific_packages(prepare_mocks2: None) -> None: + graph: Dict[Path, List[Path]] = { + Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], + Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], + Path("/path/to/package3/module3.py"): [], + } + # Assuming module1.py and module3.py belong to Package1, which is ignored + cycles = find_cycles( + graph, + build_virtual_dependency_graph(DirectoryParameters(dir_path=Path("path")), existing_graph=graph), + excluded_paths=[], + ignore_cycles_in=["Package1"], + ignore_specific_files=[], + ignore_specific_edges=[], + ) + assert len(cycles) == 1 # Cycles in Package1 are ignored + + +def test_ignore_cycles_with_specific_edges(monkeypatch: pytest.MonkeyPatch) -> None: + def _mock_chia_file_parse(path: Path) -> ChiaFile: + annotations_map = { + Path("/path/to/package1/module1a.py"): Annotation("Package1", True), + Path("/path/to/package2/module2.py"): Annotation("Package2", True), + Path("/path/to/package3/module3.py"): Annotation("Package3", True), + Path("/path/to/package1/module1b.py"): Annotation("Package1", True), + } + return ChiaFile(path=Path(path), annotations=annotations_map.get(path, Annotation("chia-blockchain", False))) + + monkeypatch.setattr("chia.util.virtual_project_analysis.ChiaFile.parse", _mock_chia_file_parse) + + graph = { + Path("/path/to/package1/module1a.py"): [Path("/path/to/package2/module2.py")], + Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], + Path("/path/to/package3/module3.py"): [Path("/path/to/package1/module1b.py")], + Path("/path/to/package1/module1b.py"): [], + } + virtual_graph = build_virtual_dependency_graph(DirectoryParameters(dir_path=Path("path")), existing_graph=graph) + cycles = find_cycles( + graph, + virtual_graph, + excluded_paths=[], + ignore_cycles_in=[], + ignore_specific_files=[], + ignore_specific_edges=[ + (File(Path("/path/to/package3/module3.py")), File(Path("/path/to/package2/module2.py"))), + (Package("Package3"), Package("Package2")), + ], + ) + assert len(cycles) == 0 + + +def test_ignore_cycles_with_specific_files(prepare_mocks2: None) -> None: + graph = { + Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], + Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], # Cycle here + } + cycles = find_cycles( + graph, + build_virtual_dependency_graph(DirectoryParameters(dir_path=Path("path")), existing_graph=graph), + excluded_paths=[], + ignore_cycles_in=[], + ignore_specific_files=[Path("/path/to/package1/module1.py"), Path("/path/to/package2/module2.py")], + ignore_specific_edges=[], + ) + assert len(cycles) == 0 + + +# Sample function to use with the decorator for testing +@click.command("blah") +@config +def sample_function(config: Config) -> None: + print(config) + + +# Helper function to create a temporary YAML configuration file 
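+# The fixture returns a factory function so each test can supply its own config dict.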
+@pytest.fixture +def create_yaml_config(tmp_path: Path) -> Callable[[Dict[str, Any]], Path]: + def _create_yaml_config(content: Dict[str, Any]) -> Path: + path = tmp_path / "config.yaml" + with open(path, "w") as f: + yaml.dump(content, f) + return path + + return _create_yaml_config + + +def test_config_with_yaml(create_yaml_config: Callable[[Dict[str, Any]], Path]) -> None: + # Create a temporary YAML configuration file + yaml_config = { + "exclude_paths": ["path/to/exclude"], + "ignore": { + "packages": ["ignored.package"], + "files": ["ignored_file.py"], + "edges": ["ignored_parent -> ignored_child"], + }, + } + config_path = create_yaml_config(yaml_config) + + runner = CliRunner() + + # Invoke the CLI with the --config option + result = runner.invoke(sample_function, ["--directory", ".", "--config", str(config_path)]) + + # Check if the command ran successfully + assert result.exit_code == 0 + + # Verify the config object created by the decorator + config = result.output + path_type = config_path.__class__.__name__ + assert config == ( + "Config(" + "directory_parameters=DirectoryParameters(" + f"dir_path={path_type}('.'), " + f"excluded_paths=[{path_type}('path/to/exclude')]" + "), " + "ignore_cycles_in=['ignored.package'], " + f"ignore_specific_files=[{path_type}('ignored_file.py')], " + f"ignore_specific_edges=[(Package(name='ignored_child', is_file=False), " + f"Package(name='ignored_parent', is_file=False))]" + ")\n" + ) + + +def test_parse_edges(tmp_path: Path) -> None: + chia_dir = tmp_path / "chia" + chia_dir.mkdir() + + # Create some files within the chia package + create_python_file( + chia_dir, + "module1.py", + textwrap.dedent( + """ + # Package: one + def func1(): pass + from chia.module2 import func2 + from chia.module3 import func3 + """ + ), + ) + create_python_file( + chia_dir, + "module2.py", + textwrap.dedent( + """ + # Package: two + def func2(): pass + """ + ), + ) + create_python_file( + chia_dir, + "module3.py", + textwrap.dedent( + """ + # Package: three + def func3(): pass + """ + ), + ) + + # Run the command + runner = CliRunner() + result = runner.invoke( + cli, + [ + "print_edges", + "--directory", + str(chia_dir), + "--dependent-package", + "one", + "--provider-package", + "two", + ], + ) + assert result.output.strip() == f"{str(chia_dir / 'module1.py')} (one) -> {str(chia_dir / 'module2.py')} (two)" diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py new file mode 100644 index 000000000000..2b6c31c13e8a --- /dev/null +++ b/chia/util/virtual_project_analysis.py @@ -0,0 +1,532 @@ +from __future__ import annotations + +import ast +import json +import os +import re +import sys +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union + +import click +import yaml + +# This tool enforces digraph dependencies within a "virtual project structure". +# i.e. files grouped together forming a project are not allowed to have cyclical +# dependencies on other such groups. + +# by default, all files are considered part of the "chia-blockchain" project. + +# To pull out a sub project, annotate its files with a comment (on the first +# line): +# Package: + +# if chia-blockchain depends on this new sub-project, the sub-project may not +# depend back on chia-blockchain. 
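+#
+# For example, a file opting into its own package would start with a single
+# annotation comment (the package name below is illustrative; any name is
+# accepted, and all files sharing a name form one virtual package):
+#
+#   # Package: wallet-tooling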
+ + +@dataclass(frozen=True) +class Annotation: + package: str + is_annotated: bool + + @classmethod + def parse(cls, file_string: str) -> Annotation: + result = re.search(r"^# Package: (.+)$", file_string, re.MULTILINE) + if result is None: + return cls("chia-blockchain", False) + + return cls(result.group(1).strip(), True) + + +@dataclass(frozen=True) +class ChiaFile: + path: Path + annotations: Annotation + + @classmethod + def parse(cls, file_path: Path) -> ChiaFile: + with open(file_path, encoding="utf-8", errors="ignore") as f: + file_string = f.read().strip() + return cls(file_path, Annotation.parse(file_string)) + + +def build_dependency_graph(dir_params: DirectoryParameters) -> Dict[Path, List[Path]]: + dependency_graph: Dict[Path, List[Path]] = {} + for chia_file in dir_params.gather_non_empty_python_files(): + dependency_graph[chia_file.path] = [] + with open(chia_file.path, encoding="utf-8", errors="ignore") as f: + filestring = f.read() + tree = ast.parse(filestring, filename=chia_file.path) + for node in ast.iter_child_nodes(tree): + if isinstance(node, ast.ImportFrom): + if node.module is not None and node.module.startswith(dir_params.dir_path.stem): + imported_path = os.path.join(dir_params.dir_path.parent, node.module.replace(".", "/") + ".py") + paths_to_search = [ + imported_path, + *(os.path.join(imported_path[:-3], alias.name + ".py") for alias in node.names), + ] + for path_to_search in paths_to_search: + if os.path.exists(path_to_search): + dependency_graph[chia_file.path].append(Path(path_to_search)) + elif isinstance(node, ast.Import): + for alias in node.names: + if alias.name.startswith(dir_params.dir_path.stem): + imported_path = os.path.join( + dir_params.dir_path.parent, alias.name.replace(".", "/") + ".py" + ) + if os.path.exists(imported_path): + dependency_graph[chia_file.path].append(Path(imported_path)) + return dependency_graph + + +def build_virtual_dependency_graph( + dir_params: DirectoryParameters, *, existing_graph: Optional[Dict[Path, List[Path]]] = None +) -> Dict[str, List[str]]: + if existing_graph is None: + graph = build_dependency_graph(dir_params) + else: + graph = existing_graph + + virtual_graph: Dict[str, List[str]] = {} + for file, imports in graph.items(): + file_path = Path(file) + root_file = ChiaFile.parse(file_path) + if root_file.annotations is None: + continue + root = root_file.annotations.package + virtual_graph.setdefault(root, []) + + dependency_files = [ChiaFile.parse(Path(imp)) for imp in imports] + dependencies = [f.annotations.package for f in dependency_files if f.annotations is not None] + + virtual_graph[root].extend(dependencies) + + # Filter out self before returning the list + return {k: list({v for v in vs if v != k}) for k, vs in virtual_graph.items()} + + +@dataclass(frozen=True) +class Cycle: + dependent_path: Path + dependent_package: str + provider_path: Path + provider_package: str + packages_after_provider: List[str] + + def __repr__(self) -> str: + return "".join( + ( + f"{self.dependent_path} ({self.dependent_package}) -> ", + f"{self.provider_path} ({self.provider_package}) -> ", + *(f"({extra}) -> " for extra in self.packages_after_provider), + ) + )[:-4] + + def possible_edge_interpretations(self) -> List[Tuple[FileOrPackage, FileOrPackage]]: + edges_after_initial_files = [] + provider = self.packages_after_provider[0] + for next_provider in self.packages_after_provider[1:]: + edges_after_initial_files.append((Package(next_provider), Package(provider))) + provider = next_provider + + return [ + # 
Dependent -> Provider + (File(self.provider_path), File(self.dependent_path)), + (Package(self.provider_package), File(self.dependent_path)), + (File(self.provider_path), Package(self.dependent_package)), + (Package(self.provider_package), Package(self.dependent_package)), + # Provider -> Dependent/Other Packages + (Package(self.packages_after_provider[0]), File(self.provider_path)), + (Package(self.packages_after_provider[0]), Package(self.provider_package)), + # the rest + *edges_after_initial_files, + ] + + +def find_all_dependency_paths(dependency_graph: Dict[str, List[str]], start: str, end: str) -> List[List[str]]: + all_paths = [] + visited = set() + + def dfs(current: str, target: str, path: List[str]) -> None: + if current in visited: + return + if current == target and len(path) > 0: + all_paths.append(path[1:] + [current]) + return + visited.add(current) + for provider in sorted(dependency_graph.get(current, [])): + dfs(provider, target, path + [current]) + + dfs(start, end, []) + return all_paths + + +def find_cycles( + graph: Dict[Path, List[Path]], + virtual_graph: Dict[str, List[str]], + excluded_paths: List[Path], + ignore_cycles_in: List[str], + ignore_specific_files: List[Path], + ignore_specific_edges: List[Tuple[FileOrPackage, FileOrPackage]], +) -> List[Cycle]: + # Initialize an accumulator for paths that are part of cycles. + path_accumulator = [] + # Iterate over each package (parent) in the graph. + for dependent in sorted(graph): + if dependent in excluded_paths: + continue + # Parse the parent package file. + dependent_file = ChiaFile.parse(dependent) + # Skip this package if it has no annotations or should be ignored in cycle detection. + if ( + dependent_file.annotations is None + or dependent_file.annotations.package in ignore_cycles_in + or dependent in ignore_specific_files + ): + continue + + for provider in sorted(graph[dependent]): + if provider in excluded_paths: + continue + provider_file = ChiaFile.parse(provider) + if ( + provider_file.annotations is None + or provider_file.annotations.package == dependent_file.annotations.package + ): + continue + + dependency_paths = find_all_dependency_paths( + virtual_graph, provider_file.annotations.package, dependent_file.annotations.package + ) + if dependency_paths is None: + continue + + for dependency_path in dependency_paths: + possible_cycle = Cycle( + dependent_file.path, + dependent_file.annotations.package, + provider_file.path, + provider_file.annotations.package, + dependency_path, + ) + + for edge in possible_cycle.possible_edge_interpretations(): + if edge in ignore_specific_edges: + break + else: + path_accumulator.append(possible_cycle) + + # Format and return the accumulated paths as strings showing the cycles. + return path_accumulator + + +def print_graph(graph: Union[Dict[str, List[str]], Dict[Path, List[Path]]]) -> None: + print(json.dumps({str(k): list(str(v) for v in vs) for k, vs in graph.items()}, indent=4)) + + +@click.group(help="A utility for grouping different parts of the repo into separate projects") +def cli() -> None: + pass + + +@dataclass(frozen=True) +class DirectoryParameters: + dir_path: Path + excluded_paths: List[Path] = field(default_factory=list) + + def gather_non_empty_python_files(self) -> List[ChiaFile]: + """ + Gathers non-empty Python files in the specified directory while + ignoring files and directories in the excluded paths. + + Returns: + A list of paths to non-empty Python files. 
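+
+        Note that the entries are parsed ChiaFile objects, and that exclusions
+        apply both to directories (pruned during the walk) and to files.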
+ """ + python_files = [] + for root, dirs, files in os.walk(self.dir_path, topdown=True): + # Modify dirs in-place to remove excluded directories from search + dirs[:] = [d for d in dirs if Path(os.path.join(root, d)) not in self.excluded_paths] + + for file in files: + file_path = Path(os.path.join(root, file)) + # Check if the file is a Python file and not in the excluded paths + if file_path.suffix == ".py" and file_path not in self.excluded_paths: + # Check if the file is non-empty + if os.path.getsize(file_path) > 0: + python_files.append(ChiaFile.parse(file_path)) + + return python_files + + +@dataclass(frozen=True) +class Config: + directory_parameters: DirectoryParameters + ignore_cycles_in: List[str] + ignore_specific_files: List[Path] + ignore_specific_edges: List[Tuple[FileOrPackage, FileOrPackage]] # (parent, child) + + +@dataclass(frozen=True) +class File: + name: Path + is_file: Literal[True] = True + + +@dataclass(frozen=True) +class Package: + name: str + is_file: Literal[False] = False + + +FileOrPackage = Union[File, Package] + + +def parse_file_or_package(identifier: str) -> FileOrPackage: + if ".py" in identifier: + if "(" not in identifier: + return File(Path(identifier)) + else: + return File(Path(identifier.split("(")[0].strip())) + + if ".py" not in identifier and identifier[0] == "(" and identifier[-1] == ")": + return Package(identifier[1:-1]) # strip parens + + return Package(identifier) + + +def parse_edge(user_string: str) -> Tuple[FileOrPackage, FileOrPackage]: + split_string = user_string.split("->") + dependent_side = split_string[0].strip() + provider_side = split_string[1].strip() + + return parse_file_or_package(provider_side), parse_file_or_package(dependent_side) + + +def config(func: Callable[..., None]) -> Callable[..., None]: + @click.option( + "--directory", + "include_dir", + type=click.Path(exists=True, file_okay=False, dir_okay=True), + required=True, + help="The directory to include.", + ) + @click.option( + "--exclude-path", + "excluded_paths", + multiple=True, + type=click.Path(exists=False, file_okay=True, dir_okay=True), + help="Optional paths to exclude.", + ) + @click.option( + "--config", + "config_path", + type=click.Path(exists=True), + required=False, + default=None, + help="Path to the YAML configuration file.", + ) + def inner(config_path: Optional[str], *args: Any, **kwargs: Any) -> None: + exclude_paths = [] + ignore_cycles_in: List[str] = [] + ignore_specific_files: List[str] = [] + ignore_specific_edges: List[str] = [] + if config_path is not None: + # Reading from the YAML configuration file + with open(config_path) as file: + config_data = yaml.safe_load(file) + + # Extracting required configuration values + exclude_paths = [Path(p) for p in config_data.get("exclude_paths") or []] + ignore_cycles_in = config_data["ignore"].get("packages") or [] + ignore_specific_files = config_data["ignore"].get("files") or [] + ignore_specific_edges = config_data["ignore"].get("edges") or [] + + # Instantiate DirectoryParameters with the provided options + dir_params = DirectoryParameters( + dir_path=Path(kwargs.pop("include_dir")), + excluded_paths=[*(Path(p) for p in kwargs.pop("excluded_paths")), *exclude_paths], + ) + + # Make the ignored edge dictionary + ignore_specific_edges_graph = [] + for ignore in (*kwargs.pop("ignore_specific_edges", []), *ignore_specific_edges): + parent, child = parse_edge(ignore) + ignore_specific_edges_graph.append((parent, child)) + + # Instantiating the Config object + config = Config( + 
directory_parameters=dir_params, + ignore_cycles_in=[*kwargs.pop("ignore_cycles_in", []), *ignore_cycles_in], + ignore_specific_files=[Path(p) for p in (*kwargs.pop("ignore_specific_files", []), *ignore_specific_files)], + ignore_specific_edges=ignore_specific_edges_graph, + ) + + # Calling the wrapped function with the Config object and other arguments + return func(config, *args, **kwargs) + + return inner + + +@click.command("find_missing_annotations", short_help="Search a directory for chia files without annotations") +@config +def find_missing_annotations(config: Config) -> None: + flag = False + for file in config.directory_parameters.gather_non_empty_python_files(): + if not file.annotations.is_annotated: + print(file.path) + flag = True + + if flag: + sys.exit(1) + + +@click.command("print_dependency_graph", short_help="Output a dependency graph of all the files in a directory") +@config +def print_dependency_graph(config: Config) -> None: + print_graph(build_dependency_graph(config.directory_parameters)) + + +@click.command( + "print_virtual_dependency_graph", short_help="Output a dependency graph of all the packages in a directory" +) +@config +def print_virtual_dependency_graph(config: Config) -> None: + print_graph(build_virtual_dependency_graph(config.directory_parameters)) + + +@click.command("print_cycles", short_help="Output cycles found in the virtual dependency graph") +@click.option( + "--ignore-cycles-in", + "ignore_cycles_in", + multiple=True, + type=str, + help="Ignore dependency cycles in a package", +) +@click.option( + "--ignore-specific-file", + "ignore_specific_files", + multiple=True, + type=click.Path(exists=True, file_okay=True, dir_okay=False), + help="Ignore cycles involving specific files", +) +@click.option( + "--ignore-specific-edge", + "ignore_specific_edges", + multiple=True, + type=str, + help="Ignore specific problematic dependencies (format: path/to/file1 -> path/to/file2)", +) +@config +def print_cycles(config: Config) -> None: + flag = False + graph = build_dependency_graph(config.directory_parameters) + for cycle in find_cycles( + graph, + build_virtual_dependency_graph(config.directory_parameters, existing_graph=graph), + config.directory_parameters.excluded_paths, + config.ignore_cycles_in, + config.ignore_specific_files, + config.ignore_specific_edges, + ): + print(cycle) + flag = True + + if flag: + sys.exit(1) + + +@click.command("check_config", short_help="Check the config is as specific as it can be") +@click.option( + "--ignore-cycles-in", + "ignore_cycles_in", + multiple=True, + type=str, + help="Ignore dependency cycles in a package", +) +@click.option( + "--ignore-specific-file", + "ignore_specific_files", + multiple=True, + type=click.Path(exists=True, file_okay=True, dir_okay=False), + help="Ignore cycles involving specific files", +) +@click.option( + "--ignore-specific-edge", + "ignore_specific_edges", + multiple=True, + type=str, + help="Ignore specific problematic dependencies (format: path/to/file1 -> path/to/file2)", +) +@config +def check_config(config: Config) -> None: + graph = build_dependency_graph(config.directory_parameters) + cycles = find_cycles( + graph, + build_virtual_dependency_graph(config.directory_parameters, existing_graph=graph), + config.directory_parameters.excluded_paths, + [], + [], + [], + ) + modules_found = set() + files_found = set() + edges_found = set() + for cycle in cycles: + modules_found.add(cycle.dependent_package) + files_found.add(cycle.dependent_path) + 
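+        # Record every interpretation of this cycle's edges so unused ignore rules can be reported below.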
edges_found.update(set(cycle.possible_edge_interpretations())) + + for module in config.ignore_cycles_in: + if module not in modules_found: + print(f" module {module} ignored but no cycles were found") + print() + for file in config.ignore_specific_files: + if file not in files_found: + print(f" file {file} ignored but no cycles were found") + print() + for edge in config.ignore_specific_edges: + if edge not in edges_found: + print(f" edge {edge[1].name} -> {edge[0].name} ignored but no cycles were found") + + +@click.command("print_edges", short_help="Check for all of the ways a package immediately depends on another") +@click.option( + "--dependent-package", + "from_package", + type=str, + help="The package that depends on the other", +) +@click.option( + "--provider-package", + "to_package", + type=str, + help="The package that the dependent package imports from", +) +@config +def print_edges(config: Config, from_package: str, to_package: str) -> None: + graph = build_dependency_graph(config.directory_parameters) + for dependent, providers in graph.items(): + dependent_file = ChiaFile.parse(dependent) + assert dependent_file.annotations is not None + if dependent_file.annotations.package == from_package: + for provider in providers: + provider_file = ChiaFile.parse(provider) + assert provider_file.annotations is not None + if provider_file.annotations.package == to_package: + print( + f"{dependent} ({dependent_file.annotations.package}) -> " + f"{provider} ({provider_file.annotations.package})" + ) + + +cli.add_command(find_missing_annotations) +cli.add_command(print_dependency_graph) +cli.add_command(print_virtual_dependency_graph) +cli.add_command(print_cycles) +cli.add_command(check_config) +cli.add_command(print_edges) + +if __name__ == "__main__": + cli() diff --git a/virtual_project.yaml b/virtual_project.yaml new file mode 100644 index 000000000000..0774d3b636e9 --- /dev/null +++ b/virtual_project.yaml @@ -0,0 +1,5 @@ +exclude_paths: +ignore: + packages: + files: + edges: From 0f45239718f8b2d9629e7801f873535c78c29337 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 10 Oct 2024 21:57:33 +0100 Subject: [PATCH 34/69] Update the anchor. --- chia-blockchain-gui | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia-blockchain-gui b/chia-blockchain-gui index 7e6f90ed1bbd..9e012a02c50d 160000 --- a/chia-blockchain-gui +++ b/chia-blockchain-gui @@ -1 +1 @@ -Subproject commit 7e6f90ed1bbda7dcc9181a7b424a82e4fb473901 +Subproject commit 9e012a02c50d6ddf0e96797d85eb183de91874e7 From 5041c8ce2dc181ae2fa0acea6602b159a427d259 Mon Sep 17 00:00:00 2001 From: William Allen Date: Fri, 11 Oct 2024 14:52:20 -0500 Subject: [PATCH 35/69] Fix install.sh upgrade issue (#18673) --- install.sh | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/install.sh b/install.sh index adcb88d8a01a..363c4074e24b 100755 --- a/install.sh +++ b/install.sh @@ -172,7 +172,18 @@ fi .penv/bin/poetry env use "${INSTALL_PYTHON_PATH}" # shellcheck disable=SC2086 .penv/bin/poetry install ${EXTRAS} -ln -s -f .venv venv + +if [ -e venv ]; then + if [ -d venv ] && [ ! -L venv ]; then + echo "The 'venv' directory already exists. Please delete it before installing." + exit 1 + elif [ -L venv ]; then + ln -sfn .venv venv + fi +else + ln -s .venv venv +fi + if [ ! -f "activate" ]; then ln -s venv/bin/activate . 
fi

From 837f0e2d472cdec45d4eb880b908b43af8b58d5d Mon Sep 17 00:00:00 2001
From: Arvid Norberg
Date: Fri, 11 Oct 2024 22:43:31 +0200
Subject: [PATCH 36/69] bump chia_rs dependency (#18695)

bump chia_rs dependency and accommodate changes to
SpendBundleConditions and run_block_generator()
---
 .../core/full_node/test_generator_tools.py   |  2 +-
 chia/_tests/core/mempool/test_mempool.py     |  2 +-
 .../core/mempool/test_mempool_manager.py     |  1 +
 .../test_fee_estimation_integration.py       |  2 +-
 chia/_tests/util/test_condition_tools.py     |  4 +-
 chia/full_node/mempool_check_conditions.py   |  6 +-
 poetry.lock                                  | 56 +++++++++----------
 pyproject.toml                               |  2 +-
 tools/analyze-chain.py                       | 14 ++++-
 9 files changed, 50 insertions(+), 39 deletions(-)

diff --git a/chia/_tests/core/full_node/test_generator_tools.py b/chia/_tests/core/full_node/test_generator_tools.py
index 14b1d9797286..f3b303f2100e 100644
--- a/chia/_tests/core/full_node/test_generator_tools.py
+++ b/chia/_tests/core/full_node/test_generator_tools.py
@@ -67,7 +67,7 @@ def test_tx_removals_and_additions() -> None:
-    conditions = SpendBundleConditions(spends, uint64(0), uint32(0), uint64(0), None, None, [], uint64(0), 0, 0)
+    conditions = SpendBundleConditions(spends, uint64(0), uint32(0), uint64(0), None, None, [], uint64(0), 0, 0, False)
     expected_rems = [coin_ids[0], coin_ids[1]]
     expected_additions = []
     for spend in spends:
diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py
index 0d49794cc396..ebd89da77799 100644
--- a/chia/_tests/core/mempool/test_mempool.py
+++ b/chia/_tests/core/mempool/test_mempool.py
@@ -107,7 +107,7 @@ def make_item(
     return MempoolItem(
         SpendBundle([], G2Element()),
         fee,
-        SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0),
+        SpendBundleConditions([], 0, 0, 0, None, None, [], cost, 0, 0, False),
         spend_bundle_name,
         uint32(0),
         assert_height,
diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py
index 4d5ac4677679..8e0796202aad 100644
--- a/chia/_tests/core/mempool/test_mempool_manager.py
+++ b/chia/_tests/core/mempool/test_mempool_manager.py
@@ -227,6 +227,7 @@ def make_test_conds(
         cost,
         0,
         0,
+        False,
     )
diff --git a/chia/_tests/fee_estimation/test_fee_estimation_integration.py b/chia/_tests/fee_estimation/test_fee_estimation_integration.py
index 5ba901a5406e..80129ecf75e4 100644
--- a/chia/_tests/fee_estimation/test_fee_estimation_integration.py
+++ b/chia/_tests/fee_estimation/test_fee_estimation_integration.py
@@ -42,7 +42,7 @@ def make_mempoolitem() -> MempoolItem:
     fee = uint64(10000000)
     spends: List[SpendConditions] = []
-    conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], cost, 0, 0)
+    conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], cost, 0, 0, False)
     mempool_item = MempoolItem(
         spend_bundle,
         fee,
diff --git a/chia/_tests/util/test_condition_tools.py b/chia/_tests/util/test_condition_tools.py
index 29e6b1f0c74a..1a939c38baf7 100644
--- a/chia/_tests/util/test_condition_tools.py
+++ b/chia/_tests/util/test_condition_tools.py
@@ -53,7 +53,7 @@ def mk_agg_sig_conditions(
         agg_sig_puzzle_amount=agg_sig_data if opcode == ConditionOpcode.AGG_SIG_PUZZLE_AMOUNT else [],
         flags=0,
     )
-    return SpendBundleConditions([spend], 0, 0, 0, None, None, agg_sig_unsafe_data, 0, 0, 0)
+    return SpendBundleConditions([spend], 0, 0, 0, None, None, agg_sig_unsafe_data, 0, 0, 0, False)
 
 
 @pytest.mark.parametrize(
@@ -100,7 +100,7 @@ def test_pkm_pairs_vs_for_conditions_dict(opcode: ConditionOpcode) -> None:
 
 
 class 
TestPkmPairs: def test_empty_list(self) -> None: - conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0) + conds = SpendBundleConditions([], 0, 0, 0, None, None, [], 0, 0, 0, False) pks, msgs = pkm_pairs(conds, b"foobar") assert pks == [] assert msgs == [] diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py index a84df7f04ed2..57b2f8e5fb63 100644 --- a/chia/full_node/mempool_check_conditions.py +++ b/chia/full_node/mempool_check_conditions.py @@ -3,7 +3,7 @@ import logging from typing import Dict, List, Optional -from chia_rs import MEMPOOL_MODE, get_flags_for_height_and_constants +from chia_rs import DONT_VALIDATE_SIGNATURE, MEMPOOL_MODE, G2Element, get_flags_for_height_and_constants from chia_rs import get_puzzle_and_solution_for_coin2 as get_puzzle_and_solution_for_coin_rust from chia_rs import run_block_generator, run_block_generator2, run_chia_program @@ -36,7 +36,7 @@ def get_name_puzzle_conditions( height: uint32, constants: ConsensusConstants, ) -> NPCResult: - flags = get_flags_for_height_and_constants(height, constants) + flags = get_flags_for_height_and_constants(height, constants) | DONT_VALIDATE_SIGNATURE if mempool_mode: flags = flags | MEMPOOL_MODE @@ -48,7 +48,7 @@ def get_name_puzzle_conditions( try: block_args = generator.generator_refs - err, result = run_block(bytes(generator.program), block_args, max_cost, flags, constants) + err, result = run_block(bytes(generator.program), block_args, max_cost, flags, G2Element(), None, constants) assert (err is None) != (result is None) if err is not None: return NPCResult(uint16(err), None) diff --git a/poetry.lock b/poetry.lock index f29db50c4547..e1a7c3a0d7bd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -796,37 +796,37 @@ dev = ["black (>=23.1.0)", "pytest (>=7.2.1)", "ruff (>=0.0.252)"] [[package]] name = "chia-rs" -version = "0.14.0" +version = "0.15.0" description = "Code useful for implementing chia consensus." 
optional = false python-versions = "*" files = [ - {file = "chia_rs-0.14.0-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:defa14a8a9532d2d0eb3b6b263ce6ad2c2c3ac5b37ff49e42a4202b1855d6ce9"}, - {file = "chia_rs-0.14.0-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:74724d50d18f48d3643e10308ab6b1ad98dbd47a136a9b293a4c985d91069b21"}, - {file = "chia_rs-0.14.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:dc1052c718dc930997b4ef50478d24973dad2b518ba0634347f7815b5b8f6643"}, - {file = "chia_rs-0.14.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0aee2574d24c5db06a74cb0d69949f03575cdf33a7e7a8673cdab298bdf491a8"}, - {file = "chia_rs-0.14.0-cp310-none-win_amd64.whl", hash = "sha256:291a3821951c3505e1172c772ee329f75fe49961a52952d57fdd49eddf8ad22a"}, - {file = "chia_rs-0.14.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4020b1086a8ab26aeee39be71c87b6e8c16481ce75eb82200d394f762ddbbc0b"}, - {file = "chia_rs-0.14.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:9e9e9f43259e7a8281a3a731f42bc14b2bf91bc2d3ef51cd5c49b1cefb6e2389"}, - {file = "chia_rs-0.14.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:a87faa328af72e105e3bf02f276e225aabcba4748c392555905bc8be211ef6d1"}, - {file = "chia_rs-0.14.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:138c0f42d522a97a9486440ecdd943dcd58b38b96d4830f4fe6f00413dcfadf1"}, - {file = "chia_rs-0.14.0-cp311-none-win_amd64.whl", hash = "sha256:4b6265ebe1349bfc743db19a2a9c33fc79e97826f2acfe26554375cd929628c8"}, - {file = "chia_rs-0.14.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:740d4ac6222e82fc0dc2fddc04148d0504b383ee68f3ae094f91bc9a2936d20d"}, - {file = "chia_rs-0.14.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:e0757077264605cdaa7e0f49b95fc8c075808348cd640e30ce9c40132b107d42"}, - {file = "chia_rs-0.14.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:49c282441e23c089aa94d33b1a24d1324383aedb5e20af9b42d6e87a4f26ec1f"}, - {file = "chia_rs-0.14.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c247aef6154194670338ad2e95783dadc5a82b5f671edb3c9314dd95505553a4"}, - {file = "chia_rs-0.14.0-cp312-none-win_amd64.whl", hash = "sha256:75a51561e3bd375884853492e7a8f41162694593f39deb1d2757f98795d311aa"}, - {file = "chia_rs-0.14.0-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:40873da635ea0a253e006eb427f5823b2123ed9045bf0a548902035b0c7bd214"}, - {file = "chia_rs-0.14.0-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:fcb4fe4ebcaac87780c54a7fac12dea3dcd142c061c6b4d3e38e303c7e18857a"}, - {file = "chia_rs-0.14.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:636ba7435aa7f114f0cbf687c2ac7ea868f98c47c8c1b5e7894a1fbc8197d8d3"}, - {file = "chia_rs-0.14.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:db45d48d55554933d71bad7169aa3ea2c2d99d4bd8e37e43e7f84b0fdd5b97a5"}, - {file = "chia_rs-0.14.0-cp38-none-win_amd64.whl", hash = "sha256:5e813775655a41990dc6e9ef4f66c958aa11c0bc43b7a7e68c99c392aab9f560"}, - {file = "chia_rs-0.14.0-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4667bcb01fa2ffcaea02f6e9c9f492319abdd4c0133ab7c65e3601d8d70bfe9b"}, - {file = "chia_rs-0.14.0-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:3ac5861cc1a5093ecea80dbfc6bf152a8cc44610707a0ad4a88fea5c2b019e28"}, - {file = "chia_rs-0.14.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:faca2e80513eaef000663384f1abec39caed642dc5812729550448067322b1f9"}, - {file = "chia_rs-0.14.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:892623e6df27c41e344431bf2f4440f46aacc4a4aa48aff2728b144e6f6a270b"}, - {file = 
"chia_rs-0.14.0-cp39-none-win_amd64.whl", hash = "sha256:a03362e6283d0fc1bc5063db666dd75da7fd0e52df32eb5a68095e0564bae4ee"}, - {file = "chia_rs-0.14.0.tar.gz", hash = "sha256:6652e7c328e42b31e9be8e985c1bfc1ddcd83cf31e6b5eb9c0a31a641411677b"}, + {file = "chia_rs-0.15.0-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:8e93bd5adc140ab202077c226081d92e6d1eedfa36fac1eaf4ee6436736dfc95"}, + {file = "chia_rs-0.15.0-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:7ede0049c0703fbc11ff6ae7919fa29237941e3984bdb518f11c977b387560da"}, + {file = "chia_rs-0.15.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:236caab6133f654a90f2d5374cadcf1c4f071fa52ff097fa0f16180557adf8eb"}, + {file = "chia_rs-0.15.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:55988ddd58f9fcd10034679b29aaa7146b494be71590e7b940b06a1ea319297f"}, + {file = "chia_rs-0.15.0-cp310-none-win_amd64.whl", hash = "sha256:971d3a1695880860f71249f5b2159546003b0b74b82499a28a2495720e8922bb"}, + {file = "chia_rs-0.15.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:98e773b6876243fd02e202c44162c4ab38d57a6e731f8de790f46f8646596d1d"}, + {file = "chia_rs-0.15.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:457a82ef5e0f10236fbc86279b1dbfa64f84c4a7e40e2a28d105da653b9112a6"}, + {file = "chia_rs-0.15.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:1d7b376f0884c2aef3bde9064b5cebfb86f0319108c06faea8c223ee18505ab2"}, + {file = "chia_rs-0.15.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:7e8e1b3836b4c003587e49e6612d6bb5c84d6ef91cf269a955b809789df1b007"}, + {file = "chia_rs-0.15.0-cp311-none-win_amd64.whl", hash = "sha256:4e51d4b6c42a5e36c6c3759a11cb3a002d528805ebbef298caf115a8236136df"}, + {file = "chia_rs-0.15.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:ea69015e2cf69e90b83055c7eec1964efd14c8a4f3a739e30e8d9225fae67666"}, + {file = "chia_rs-0.15.0-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:e3683ff9c01b2f2e0ae4e78a050e03249773f78141f152d905b0586a37319b75"}, + {file = "chia_rs-0.15.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:68b4a8547f4593917f960f7ca6b6a68a596b27dd35a309646f549830aae94692"}, + {file = "chia_rs-0.15.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:a9450ebf4eec7ce232bcda2c2c3c42aa97e216317ab91a56bfc1f35460ae052f"}, + {file = "chia_rs-0.15.0-cp312-none-win_amd64.whl", hash = "sha256:5cd8d45912073a7c12e2057e264dbd3bb3ea0c9cd47459ae31a669edb0f7e6dc"}, + {file = "chia_rs-0.15.0-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:0773a409f9258d93eb6ce25229515c147648304dd5afc35a43d502c7e7364cc4"}, + {file = "chia_rs-0.15.0-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:db905d00ff0a9bbbd35211ee9969266af503d868535ec800384e4603bcb06269"}, + {file = "chia_rs-0.15.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:653db4b283fa81c35434391ea70bd36f01f09ede20b0b04101eea79ef30d7cfd"}, + {file = "chia_rs-0.15.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7fdde0557a074cb66edfece843d965c383c097c7fdccd7bf09d6516b4f7db5d2"}, + {file = "chia_rs-0.15.0-cp38-none-win_amd64.whl", hash = "sha256:8c71979d457d17b6dc082c1b85e760dc81a6dd1da35bebe9f17087b371b05286"}, + {file = "chia_rs-0.15.0-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:4c86a8902a72c224702225b75ca53b6bb1a5338589e875173c0bcb640dc75776"}, + {file = "chia_rs-0.15.0-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:7044c5536d99dfa6785f14ea40a4cc97847188e256e3c887c22207592402891a"}, + {file = "chia_rs-0.15.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = 
"sha256:7e97bd6b05c410ee6be4565bf529a55ecd8d1b2af25ac32adc7c70ceaf01607b"}, + {file = "chia_rs-0.15.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:779e76222951a4fcfb889640c0780f52eac92ddc30ad9d2f1d9f0b6cd13bf3ab"}, + {file = "chia_rs-0.15.0-cp39-none-win_amd64.whl", hash = "sha256:801a342d93df8fd571b1289b58f990a95b90676fd37924cfc631d9dd2ab65a07"}, + {file = "chia_rs-0.15.0.tar.gz", hash = "sha256:582e107e53fc5eb1a4e6d851af8f00a1fd794699689e69a3fdf4ce20e6b73665"}, ] [package.dependencies] @@ -3457,4 +3457,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.8.10, <3.13" -content-hash = "47fce9cd8892399d3803fe73442a8875d79b2439e9462aaab6cf659cf16b2e2f" +content-hash = "9ed9e509b09713592b90aad62ebf6bd4812df6972934781526902538bdb8328e" diff --git a/pyproject.toml b/pyproject.toml index f8bd6b1b1655..0101b422140b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,7 +46,7 @@ bitstring = "4.1.4" # Binary data management library boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space -chia_rs = "0.14.0" +chia_rs = "0.15.0" chiavdf = "1.1.6" # timelord and vdf verification click = "8.1.7" # For the CLI clvm = "0.9.10" diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py index 0a69aed6e2da..2a3092b2f9e6 100755 --- a/tools/analyze-chain.py +++ b/tools/analyze-chain.py @@ -11,7 +11,15 @@ import click import zstd -from chia_rs import MEMPOOL_MODE, AugSchemeMPL, G1Element, SpendBundleConditions, run_block_generator +from chia_rs import ( + DONT_VALIDATE_SIGNATURE, + MEMPOOL_MODE, + AugSchemeMPL, + G1Element, + G2Element, + SpendBundleConditions, + run_block_generator, +) from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.block_protocol import BlockInfo @@ -34,7 +42,9 @@ def run_gen( bytes(generator_program), block_program_args, DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM, - flags, + flags | DONT_VALIDATE_SIGNATURE, + G2Element(), + None, DEFAULT_CONSTANTS, ) run_time = time() - start_time From d678131ded709e6264127984ae5fc12c6823cafa Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 15 Oct 2024 18:40:47 +0200 Subject: [PATCH 37/69] [CHIA-1087] validate blocks in thread pool (instead of process pool) (#18681) * bump chia_rs dependency and accomodate for changes to SpendBundleConditions and run_block_generator() * use ThreadPoolExecutor instead of ProcessPoolExecutor in pre_validate_blocks_multiprocessing * don't serialize BlockRecords when passing into pre_validate_blocks_multiprocessing * dont' pickle the list of full blocks passed into batch_pre_validate_blocks * don't serialize conditions when passed to batch_pre_validate_blocks * don't serialize previous session blocks passed to batch_pre_validate_blocks * don't serialize the return value from batch_pre_validate_blocks * make the batch size 1, in pre_validate_blocks_multiprocessing(). With the jobs running in a thread pool, there's no serialization cost we need to amortize over a batch * make batch_pre_validate_blocks() only validate a single block at a time. rename it to pre_validate_block() * avoid copying recent_blocks into each validation job. 
since they run in a thread we can just use the blockchain object directly * merge loops over the blocks in pre_validate_blocks_multiprocessing(), to simplify the code and to build fewer temporary lists --- chia/consensus/blockchain.py | 11 +- chia/consensus/multiprocess_validation.py | 284 ++++++++-------------- chia/full_node/full_node.py | 1 - 3 files changed, 104 insertions(+), 192 deletions(-) diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 82ad73687c88..f423960feca1 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -6,10 +6,8 @@ import logging import time import traceback -from concurrent.futures import Executor -from concurrent.futures.process import ProcessPoolExecutor +from concurrent.futures import Executor, ThreadPoolExecutor from enum import Enum -from multiprocessing.context import BaseContext from pathlib import Path from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Set, Tuple, cast @@ -48,7 +46,6 @@ from chia.util.inline_executor import InlineExecutor from chia.util.ints import uint16, uint32, uint64, uint128 from chia.util.priority_mutex import PriorityMutex -from chia.util.setproctitle import getproctitle, setproctitle log = logging.getLogger(__name__) @@ -126,7 +123,6 @@ async def create( consensus_constants: ConsensusConstants, blockchain_dir: Path, reserved_cores: int, - multiprocessing_context: Optional[BaseContext] = None, *, single_threaded: bool = False, ) -> Blockchain: @@ -145,11 +141,8 @@ async def create( else: cpu_count = available_logical_cores() num_workers = max(cpu_count - reserved_cores, 1) - self.pool = ProcessPoolExecutor( + self.pool = ThreadPoolExecutor( max_workers=num_workers, - mp_context=multiprocessing_context, - initializer=setproctitle, - initargs=(f"{getproctitle()}_block_validation_worker",), ) log.info(f"Started {num_workers} processes for block validation") diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 6a434b1eef8c..46b31b99edf3 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -13,7 +13,7 @@ from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord -from chia.consensus.blockchain_interface import BlocksProtocol +from chia.consensus.blockchain_interface import BlockRecordsProtocol, BlocksProtocol from chia.consensus.constants import ConsensusConstants from chia.consensus.cost_calculator import NPCResult from chia.consensus.full_block_to_block_record import block_to_block_record @@ -30,7 +30,6 @@ from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState from chia.util.augmented_chain import AugmentedBlockchain -from chia.util.block_cache import BlockCache from chia.util.condition_tools import pkm_pairs from chia.util.errors import Err, ValidationError from chia.util.generator_tools import get_block_header, tx_removals_and_additions @@ -50,115 +49,88 @@ class PreValidationResult(Streamable): timing: uint32 # the time (in milliseconds) it took to pre-validate the block -def batch_pre_validate_blocks( +def pre_validate_block( constants: ConsensusConstants, - blocks_pickled: Dict[bytes, bytes], - full_blocks_pickled: List[bytes], - prev_transaction_generators: List[Optional[List[bytes]]], - conditions: Dict[uint32, bytes], - expected_difficulty: List[uint64], - expected_sub_slot_iters: List[uint64], + blockchain: 
BlockRecordsProtocol, + block: FullBlock, + prev_generators: Optional[List[bytes]], + conds: Optional[SpendBundleConditions], + vs: ValidationState, validate_signatures: bool, - prev_ses_block_bytes: Optional[List[Optional[bytes]]] = None, -) -> List[bytes]: - blocks: Dict[bytes32, BlockRecord] = {} - for k, v in blocks_pickled.items(): - blocks[bytes32(k)] = BlockRecord.from_bytes_unchecked(v) - results: List[PreValidationResult] = [] +) -> PreValidationResult: - # In this case, we are validating full blocks, not headers - for i in range(len(full_blocks_pickled)): - try: - validation_start = time.monotonic() - block: FullBlock = FullBlock.from_bytes_unchecked(full_blocks_pickled[i]) - tx_additions: List[Coin] = [] - removals: List[bytes32] = [] - conds: Optional[SpendBundleConditions] = None - if block.height in conditions: - conds = SpendBundleConditions.from_bytes(conditions[block.height]) - removals, tx_additions = tx_removals_and_additions(conds) - elif block.transactions_generator is not None: - # TODO: this function would be simpler if conditions were - # required to be passed in for all transaction blocks. We would - # no longer need prev_transaction_generators - prev_generators = prev_transaction_generators[i] - assert prev_generators is not None - assert block.transactions_info is not None - block_generator = BlockGenerator(block.transactions_generator, prev_generators) - assert block_generator.program == block.transactions_generator - npc_result = get_name_puzzle_conditions( - block_generator, - min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), - mempool_mode=False, - height=block.height, - constants=constants, + try: + validation_start = time.monotonic() + tx_additions: List[Coin] = [] + removals: List[bytes32] = [] + if conds is not None: + removals, tx_additions = tx_removals_and_additions(conds) + elif block.transactions_generator is not None: + # TODO: this function would be simpler if conds was + # required to be passed in for all transaction blocks. 
We would + # no longer need prev_generators + assert prev_generators is not None + assert block.transactions_info is not None + block_generator = BlockGenerator(block.transactions_generator, prev_generators) + assert block_generator.program == block.transactions_generator + npc_result = get_name_puzzle_conditions( + block_generator, + min(constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost), + mempool_mode=False, + height=block.height, + constants=constants, + ) + if npc_result.error is not None: + validation_time = time.monotonic() - validation_start + return PreValidationResult( + uint16(npc_result.error), None, npc_result.conds, False, uint32(validation_time * 1000) ) - if npc_result.error is not None: - validation_time = time.monotonic() - validation_start - results.append( - PreValidationResult( - uint16(npc_result.error), None, npc_result.conds, False, uint32(validation_time * 1000) - ) - ) - continue - assert npc_result.conds is not None - conds = npc_result.conds - removals, tx_additions = tx_removals_and_additions(conds) + assert npc_result.conds is not None + conds = npc_result.conds + removals, tx_additions = tx_removals_and_additions(conds) - header_block = get_block_header(block, tx_additions, removals) - prev_ses_block = None - if prev_ses_block_bytes is not None and len(prev_ses_block_bytes) > 0: - buffer = prev_ses_block_bytes[i] - if buffer is not None: - prev_ses_block = BlockRecord.from_bytes_unchecked(buffer) - required_iters, error = validate_finished_header_block( - constants, - BlockCache(blocks), - header_block, - True, # check_filter - expected_difficulty[i], - expected_sub_slot_iters[i], - prev_ses_block=prev_ses_block, - ) - error_int: Optional[uint16] = None - if error is not None: - error_int = uint16(error.code.value) + header_block = get_block_header(block, tx_additions, removals) + required_iters, error = validate_finished_header_block( + constants, + blockchain, + header_block, + True, # check_filter + vs.current_difficulty, + vs.current_ssi, + prev_ses_block=vs.prev_ses_block, + ) + error_int: Optional[uint16] = None + if error is not None: + error_int = uint16(error.code.value) - successfully_validated_signatures = False - # If we failed header block validation, no need to validate - # signature, the block is already invalid If this is False, it means - # either we don't have a signature (not a tx block) or we have an - # invalid signature (which also puts in an error) or we didn't - # validate the signature because we want to validate it later. - # add_block will attempt to validate the signature later. - if error_int is None and validate_signatures and conds is not None: - assert block.transactions_info is not None - pairs_pks, pairs_msgs = pkm_pairs(conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) - if not AugSchemeMPL.aggregate_verify( - pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature - ): - error_int = uint16(Err.BAD_AGGREGATE_SIGNATURE.value) - else: - successfully_validated_signatures = True + successfully_validated_signatures = False + # If we failed header block validation, no need to validate + # signature, the block is already invalid If this is False, it means + # either we don't have a signature (not a tx block) or we have an + # invalid signature (which also puts in an error) or we didn't + # validate the signature because we want to validate it later. + # add_block will attempt to validate the signature later. 
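+        # pkm_pairs() collects the (public key, message) pairs declared by the
+        # block's AGG_SIG conditions; a block carries one aggregated BLS
+        # signature, so a single aggregate_verify() call below covers all of
+        # them at once.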
+ if error_int is None and validate_signatures and conds is not None: + assert block.transactions_info is not None + pairs_pks, pairs_msgs = pkm_pairs(conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) + if not AugSchemeMPL.aggregate_verify(pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature): + error_int = uint16(Err.BAD_AGGREGATE_SIGNATURE.value) + else: + successfully_validated_signatures = True - validation_time = time.monotonic() - validation_start - results.append( - PreValidationResult( - error_int, - required_iters, - conds, - successfully_validated_signatures, - uint32(validation_time * 1000), - ) - ) - except Exception: - error_stack = traceback.format_exc() - log.error(f"Exception: {error_stack}") - validation_time = time.monotonic() - validation_start - results.append( - PreValidationResult(uint16(Err.UNKNOWN.value), None, None, False, uint32(validation_time * 1000)) - ) - return [bytes(r) for r in results] + validation_time = time.monotonic() - validation_start + return PreValidationResult( + error_int, + required_iters, + conds, + successfully_validated_signatures, + uint32(validation_time * 1000), + ) + except Exception: + error_stack = traceback.format_exc() + log.error(f"Exception: {error_stack}") + validation_time = time.monotonic() - validation_start + return PreValidationResult(uint16(Err.UNKNOWN.value), None, None, False, uint32(validation_time * 1000)) async def pre_validate_blocks_multiprocessing( @@ -187,49 +159,28 @@ async def pre_validate_blocks_multiprocessing( """ prev_b: Optional[BlockRecord] = None - # Collects all the recent blocks (up to the previous sub-epoch) - recent_blocks: Dict[bytes32, BlockRecord] = {} - num_sub_slots_found = 0 - num_blocks_seen = 0 - if blocks[0].height > 0: curr = block_records.try_block_record(blocks[0].prev_header_hash) if curr is None: return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None, False, uint32(0))] prev_b = curr - num_sub_slots_to_look_for = 3 if curr.overflow else 2 - header_hash = curr.header_hash - while ( - curr.sub_epoch_summary_included is None - or num_blocks_seen < constants.NUMBER_OF_TIMESTAMPS - or num_sub_slots_found < num_sub_slots_to_look_for - ) and curr.height > 0: - if curr.first_in_sub_slot: - assert curr.finished_challenge_slot_hashes is not None - num_sub_slots_found += len(curr.finished_challenge_slot_hashes) - recent_blocks[header_hash] = curr - if curr.is_transaction_block: - num_blocks_seen += 1 - header_hash = curr.prev_hash - curr = block_records.block_record(curr.prev_hash) - assert curr is not None - recent_blocks[header_hash] = curr # the agumented blockchain object will let us add temporary block records # they won't actually be added to the underlying blockchain object blockchain = AugmentedBlockchain(block_records) - diff_ssis: List[ValidationState] = [] - prev_ses_block_list: List[Optional[BlockRecord]] = [] + futures = [] + # Pool of workers to validate blocks concurrently for block in blocks: + assert isinstance(block, FullBlock) if len(block.finished_sub_slots) > 0: if block.finished_sub_slots[0].challenge_chain.new_difficulty is not None: vs.current_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: vs.current_ssi = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters overflow = is_overflow_block(constants, block.reward_chain_block.signage_point_index) - challenge = get_block_challenge(constants, block, BlockCache(recent_blocks), prev_b is 
None, overflow, False) + challenge = get_block_challenge(constants, block, blockchain, prev_b is None, overflow, False) if block.reward_chain_block.challenge_chain_sp_vdf is None: cc_sp_hash: bytes32 = challenge else: @@ -267,74 +218,43 @@ async def pre_validate_blocks_multiprocessing( log.error("sub_epoch_summary does not match wp sub_epoch_summary list") return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] - recent_blocks[block_rec.header_hash] = block_rec blockchain.add_extra_block(block, block_rec) # Temporarily add block to chain prev_b = block_rec - diff_ssis.append(copy.copy(vs)) - prev_ses_block_list.append(vs.prev_ses_block) - if block_rec.sub_epoch_summary_included is not None: - vs.prev_ses_block = block_rec - conditions_pickled = {} - for k, v in block_height_conds_map.items(): - conditions_pickled[k] = bytes(v) - futures = [] - # Pool of workers to validate blocks concurrently - recent_blocks_bytes = {bytes(k): bytes(v) for k, v in recent_blocks.items()} # convert to bytes + previous_generators: Optional[List[bytes]] = None - batch_size = 4 - for i in range(0, len(blocks), batch_size): - end_i = min(i + batch_size, len(blocks)) - blocks_to_validate = blocks[i:end_i] - b_pickled: List[bytes] = [] - previous_generators: List[Optional[List[bytes]]] = [] - for block in blocks_to_validate: - assert isinstance(block, FullBlock) - b_pickled.append(bytes(block)) - try: - block_generator: Optional[BlockGenerator] = await get_block_generator( - blockchain.lookup_block_generators, block - ) - except ValueError: - return [ - PreValidationResult( - uint16(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING.value), None, None, False, uint32(0) - ) - ] + try: + block_generator: Optional[BlockGenerator] = await get_block_generator( + blockchain.lookup_block_generators, block + ) if block_generator is not None: - previous_generators.append(block_generator.generator_refs) - else: - previous_generators.append(None) - - ses_blocks_bytes_list: List[Optional[bytes]] = [] - for j in range(i, end_i): - ses_block_rec = prev_ses_block_list[j] - if ses_block_rec is None: - ses_blocks_bytes_list.append(None) - else: - ses_blocks_bytes_list.append(bytes(ses_block_rec)) + previous_generators = block_generator.generator_refs + except ValueError: + return [ + PreValidationResult( + uint16(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING.value), None, None, False, uint32(0) + ) + ] futures.append( asyncio.get_running_loop().run_in_executor( pool, - batch_pre_validate_blocks, + pre_validate_block, constants, - recent_blocks_bytes, - b_pickled, + blockchain, + block, previous_generators, - conditions_pickled, - [diff_ssis[j].current_difficulty for j in range(i, end_i)], - [diff_ssis[j].current_ssi for j in range(i, end_i)], + block_height_conds_map.get(block.height), + copy.copy(vs), validate_signatures, - ses_blocks_bytes_list, ) ) + + if block_rec.sub_epoch_summary_included is not None: + vs.prev_ses_block = block_rec + # Collect all results into one flat list - return [ - PreValidationResult.from_bytes(result) - for batch_result in (await asyncio.gather(*futures)) - for result in batch_result - ] + return list(await asyncio.gather(*futures)) def _run_generator( diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index aed5683db503..7d64705f594d 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -275,7 +275,6 @@ async def manage(self) -> AsyncIterator[None]: consensus_constants=self.constants, 
blockchain_dir=self.db_path.parent, reserved_cores=reserved_cores, - multiprocessing_context=self.multiprocessing_context, single_threaded=single_threaded, ) From ef11555e45c4222f1ac481479cda3d2f731ca523 Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Tue, 15 Oct 2024 11:01:51 -0700 Subject: [PATCH 38/69] Update changelog for 2.4.4 (#18700) --- CHANGELOG.md | 84 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 537f83ec1be6..b093fe8f6b87 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,90 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project does not yet adhere to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) for setuptools_scm/PEP 440 reasons. +## 2.4.4 Chia blockchain 2024-10-15 + +## What's Changed + +### Added + +- Build both sdist and wheel for upload to pypi +- Add a `fee` option to `push_transactions` +- Add bech32m options to some key functions +- Add `--valid-at/--expires-at` to all CLI transaction RPC endpoints +- Add TXConfig args to coin commands +- Add missing client endpoints for corresponding Wallet RPC endpoints +- Add version number to every log line + +### Changed + +- Remove `block_height_list` from `BlockGenerator` +- Display sync percentage in 'chia show -s' +- Make 'chia plotnft -f fingerprint -i wallet_id' output JSON +- make Program.run() and Program.run_with_cost() default to enabling all the most recent features +- Remove soft-fork 4 special cases +- Log the rate at which blocks are added during long sync +- Rename `Spend` to `SpendConditions` +- Remove `Backend*` warning ignores +- Replace `get_flags_for_height_and_constants()` with Rust version +- Refactor `get_puzzle_and_solution_for_coin()` and introduce `get_puzzle_and_solution_for_coin2()` +- Warm up the cache once per batch in `pre_validate_blocks_multiprocessing` +- Cleanup and convert to a protocol for `BlockchainInterface` +- Update `BlockGenerator` type +- Extract coin splitting and combining logic from CLI and move to RPC +- Update long-reorg tests along with the reorg test chains +- Switch mempool TX prevalidation to the Rust version +- Remove `initial_freeze_period` from RPCs +- Introduce new `AugmentedBlockchain` class +- Use smarter coin selection algorithm for DAO wallet `select_coins_for_asset_type` +- Refactor `multiprocess_validation` +- Deduct block overhead from the mempool's maximum block clvm cost limit +- Update to macOS 13 for build and test +- Simplify batch pre validate blocks +- Add a configurable limit to the amount of DIDs that can be automatically added to the users wallet from transfer +- Datalayer: Revert ancestors table schema change from #18100 +- Datalayer: separate DAT files in folders by store id +- Datalayer: Reduce level log spam when downloading DAT files +- Datalayer: Limit full file creation when processing subscription generations +- Bump `aiohttp` to `3.10.4` +- Bump `chia_rs` to `0.14.0` +- Bump `chiavdf` to `1.1.6` +- Bump `cryptography` to `43.0.1` +- Bump `dnslib` to `0.9.25` +- Bump `pip` to `24.2` +- Bump `setuptools` to `75.1.0` + +### Fixed + +- refactor `new_peak_timelord` +- Fixed missing partial count was incorrectly incremented on solo plotNFT farming +- Timelord logging: Updated peak to hex from bytestring +- Source offer confirmed height from original coin state (fixes #18330) +- fix long sync cache +- Fix `request_fee_estimates` (thanks @Yakuhito) +- Fix confusing 
amount-related CLI help messages +- Fix `raise` on too much decimal precision in CLI +- Remove the coin amount from state layer solution +- Fix `BrokenProcessPool` error for Windows installer blueboxing +- Check to confirm external TXs submitted by wallet +- Correctly set `start_index` in `create_more_puzzle_hashes` +- Use better key resolution logic in derivation commands +- Fix new pool url detection (thanks @felixbrucker) +- Fixed logging for fast bluebox timelord (thanks @thesemaphoreslim) +- remove no-wallet-found traceback from `/dl_owned_singletons` (fixes #18518) +- Fix DID balance reporting, and port DID tests to `WalletTestFramwork` +- Fix object has no attribute code errors +- Fix fee behavior with `chia wallet coins combine` +- Fix install.sh upgrade issue (thanks @wallentx) (fixes #18672) +- fix some comments typos (thanks @pengbanban, @murongshaozong, @linchizhen) + +### Deprecated + +- Python 3.8 is deprecated per the Python EOL schedule and this release (2.4.4) will be the last to support Python 3.8 + +### Removed + +- Support for macOS 12 (Monterey) + ## 2.4.3 Chia blockchain 2024-08-21 ## What's Changed From ea5402f5941181f43303c54ea27d4bccb294845f Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Tue, 15 Oct 2024 12:29:33 -0700 Subject: [PATCH 39/69] Update CHANGELOG.md --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b093fe8f6b87..e58a50408abe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -82,6 +82,10 @@ for setuptools_scm/PEP 440 reasons. - Fix install.sh upgrade issue (thanks @wallentx) (fixes #18672) - fix some comments typos (thanks @pengbanban, @murongshaozong, @linchizhen) +### Known Issues + +- The wallet rpc `combine_coins` requires `number_of_coins` to be set explicitly in the request and otherwise will return an error + ### Deprecated - Python 3.8 is deprecated per the Python EOL schedule and this release (2.4.4) will be the last to support Python 3.8 From 21c6f397ef24a3f6f016a7f0ae46b51a06daa398 Mon Sep 17 00:00:00 2001 From: matt-o-how <48453825+matt-o-how@users.noreply.github.com> Date: Wed, 16 Oct 2024 16:50:44 +0100 Subject: [PATCH 40/69] Require fewer arguments for `chia wallet coins split` in the CLI (#18689) * initial commit * add test the other way around * isort * black and mypy * catch coin not found * black * add extra tests for code coverage * remove assert coins is not None --- chia/_tests/cmds/wallet/test_coins.py | 133 +++++++++++++++++++++++++- chia/cmds/coin_funcs.py | 23 ++++- chia/cmds/coins.py | 4 +- 3 files changed, 154 insertions(+), 6 deletions(-) diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 9e00a66d523b..4f2d377e08f3 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -2,12 +2,15 @@ import dataclasses from pathlib import Path -from typing import Tuple +from typing import List, Optional, Tuple from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert from chia._tests.cmds.wallet.test_consts import FINGERPRINT, FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 from chia.rpc.wallet_request_types import CombineCoins, CombineCoinsResponse, SplitCoins, SplitCoinsResponse +from chia.types.blockchain_format.coin import Coin +from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.coin_record import CoinRecord from 
chia.util.ints import uint16, uint32, uint64 from chia.wallet.conditions import ConditionValidTimes from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG, CoinSelectionConfig, TXConfig @@ -141,6 +144,7 @@ async def combine_coins( def test_coins_split(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients + test_coin = Coin(Program.to(0).get_tree_hash(), Program.to(1).get_tree_hash(), uint64(10_000_000_000_000)) # set RPC Client class CoinsSplitRpcClient(TestWalletRpcClient): @@ -150,9 +154,28 @@ async def split_coins( self.add_to_log("split_coins", (args, tx_config, timelock_info)) return SplitCoinsResponse([STD_UTX], [STD_TX]) + async def get_coin_records_by_names( + self, + names: List[bytes32], + include_spent_coins: bool = True, + start_height: Optional[int] = None, + end_height: Optional[int] = None, + ) -> List[CoinRecord]: + cr = CoinRecord( + test_coin, + uint32(10), + uint32(0), + False, + uint64(0), + ) + if names[0] == test_coin.name(): + return [cr] + else: + return [] + inst_rpc_client = CoinsSplitRpcClient() # pylint: disable=no-value-for-parameter test_rpc_clients.wallet_rpc_client = inst_rpc_client - target_coin_id = get_bytes32(1) + target_coin_id = test_coin.name() command_args = [ "wallet", "coins", @@ -193,3 +216,109 @@ async def split_coins( ], } test_rpc_clients.wallet_rpc_client.check_log(expected_calls) + + command_args = [ + "wallet", + "coins", + "split", + FINGERPRINT_ARG, + "-i1", + "-m0.001", + "-a0.5", # split into coins of amount 0.5 XCH or 500_000_000_000 mojo + f"-t{target_coin_id.hex()}", + "--valid-at", + "100", + "--expires-at", + "150", + ] + assert_list = [] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + expected_calls = { + "get_wallets": [(None,)], + "get_sync_status": [()], + "split_coins": [ + ( + SplitCoins( + wallet_id=uint32(1), + number_of_coins=uint16( + 20 + ), # this transaction should be equivalent to specifying 20 x 0.5xch coins + amount_per_coin=uint64(500_000_000_000), + target_coin_id=target_coin_id, + fee=uint64(1_000_000_000), + push=True, + ), + DEFAULT_TX_CONFIG, + test_condition_valid_times, + ) + ], + } + test_rpc_clients.wallet_rpc_client.check_log(expected_calls) + # try the split the other way around + command_args = [ + "wallet", + "coins", + "split", + FINGERPRINT_ARG, + "-i1", + "-m0.001", + "-n20", # split target coin into 20 coins of even amounts + f"-t{target_coin_id.hex()}", + "--valid-at", + "100", + "--expires-at", + "150", + ] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + test_rpc_clients.wallet_rpc_client.check_log(expected_calls) + # Test missing both inputs + command_args = [ + "wallet", + "coins", + "split", + FINGERPRINT_ARG, + "-i1", + "-m0.001", + f"-t{target_coin_id.hex()}", + "--valid-at", + "100", + "--expires-at", + "150", + ] + # these are various things that should be in the output + assert_list = ["Must use either -a or -n. 
For more information run --help."] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + + # Test missing coin not found both ways + target_coin_id = get_bytes32(1) + assert_list = ["Could not find target coin."] + command_args = [ + "wallet", + "coins", + "split", + FINGERPRINT_ARG, + "-i1", + "-m0.001", + "-n20", # split target coin into 20 coins of even amounts + f"-t{target_coin_id.hex()}", + "--valid-at", + "100", + "--expires-at", + "150", + ] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) + command_args = [ + "wallet", + "coins", + "split", + FINGERPRINT_ARG, + "-i1", + "-m0.001", + "-a0.5", # split into coins of amount 0.5 XCH or 500_000_000_000 mojo + f"-t{target_coin_id.hex()}", + "--valid-at", + "100", + "--expires-at", + "150", + ] + run_cli_command_and_assert(capsys, root_dir, command_args, assert_list) diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index 7b621a7ed827..c6c61f36205c 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -197,8 +197,8 @@ async def async_split( fingerprint: Optional[int], wallet_id: int, fee: uint64, - number_of_coins: int, - amount_per_coin: CliAmount, + number_of_coins: Optional[int], + amount_per_coin: Optional[CliAmount], target_coin_id: bytes32, max_coin_amount: CliAmount, min_coin_amount: CliAmount, @@ -219,6 +219,25 @@ async def async_split( print("Wallet not synced. Please wait.") return [] + if number_of_coins is None and amount_per_coin is None: + print("Must use either -a or -n. For more information run --help.") + return [] + + if number_of_coins is None: + coins = await wallet_client.get_coin_records_by_names([target_coin_id]) + if len(coins) == 0: + print("Could not find target coin.") + return [] + assert amount_per_coin is not None + number_of_coins = int(coins[0].coin.amount // amount_per_coin.convert_amount(mojo_per_unit)) + elif amount_per_coin is None: + coins = await wallet_client.get_coin_records_by_names([target_coin_id]) + if len(coins) == 0: + print("Could not find target coin.") + return [] + assert number_of_coins is not None + amount_per_coin = CliAmount(True, uint64(coins[0].coin.amount // number_of_coins)) + final_amount_per_coin = amount_per_coin.convert_amount(mojo_per_unit) tx_config = CMDTXConfigLoader( diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index a811985ba29a..d2c5124d70da 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -167,7 +167,7 @@ def combine_cmd( "--number-of-coins", type=int, help="The number of coins we are creating.", - required=True, + default=None, ) @options.create_fee() @click.option( @@ -175,7 +175,7 @@ def combine_cmd( "--amount-per-coin", help="The amount of each newly created coin, in XCH or CAT units", type=AmountParamType(), - required=True, + default=None, ) @click.option( "-t", "--target-coin-id", type=Bytes32ParamType(), required=True, help="The coin id of the coin we are splitting." 
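
For context on the change above: when only one of `-n`/`-a` is supplied, the CLI now looks up the target coin and derives the missing value from that coin's amount. A minimal standalone sketch of the arithmetic, using a hypothetical `derive_split` helper that operates on raw mojo integers rather than the CLI's `CliAmount` wrapper:

```python
from typing import Optional, Tuple


def derive_split(
    coin_amount: int, number_of_coins: Optional[int], amount_per_coin: Optional[int]
) -> Tuple[int, int]:
    # Exactly one of the two options may be omitted, mirroring -n / -a above.
    if number_of_coins is None and amount_per_coin is None:
        raise ValueError("Must use either -a or -n.")
    if number_of_coins is None:
        assert amount_per_coin is not None
        number_of_coins = coin_amount // amount_per_coin
    elif amount_per_coin is None:
        amount_per_coin = coin_amount // number_of_coins
    return number_of_coins, amount_per_coin


# A 10 XCH coin is 10_000_000_000_000 mojos; splitting it at 0.5 XCH per coin
# (500_000_000_000 mojos) yields the 20 coins the test above expects.
assert derive_split(10_000_000_000_000, None, 500_000_000_000) == (20, 500_000_000_000)
```
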
From 3ce4acb49c101cc0c1027b35e36ef2beb5173d94 Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Wed, 16 Oct 2024 11:25:03 -0700 Subject: [PATCH 41/69] [CHIA-1635] Add default for `number_of_coins` in `CombineCoins` (#18704) Add default for `number_of_coins` in `CombineCoins` --- chia/rpc/wallet_request_types.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index b2c264bf4f3e..35a9f2c89625 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -501,7 +501,7 @@ class SplitCoinsResponse(TransactionEndpointResponse): @kw_only_dataclass class CombineCoins(TransactionEndpointRequest): wallet_id: uint32 = field(default_factory=default_raise) - number_of_coins: uint16 = field(default_factory=default_raise) + number_of_coins: uint16 = uint16(500) largest_first: bool = False target_coin_ids: List[bytes32] = field(default_factory=list) target_coin_amount: Optional[uint64] = None From 9c8557a1f8dbf0d80e37d1200a93c30c505145f8 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Wed, 16 Oct 2024 17:21:15 -0400 Subject: [PATCH 42/69] Remove Python 3.8 support and update source to 3.9 standards (#18687) * manual python 3.8 removals * poetry * pyupgrade * black * isort * manually tidy imports * work around shadowed type * catch up build network protocol files for pyupgrade changes * placate pylint * re-enable 20.04 * placate flake8 --- .../workflows/check_wheel_availability.yaml | 7 +- .github/workflows/pre-commit.yml | 1 - .github/workflows/test-install-scripts.yml | 7 +- .github/workflows/test-single.yml | 13 - .github/workflows/upload-pypi-source.yml | 9 +- .pre-commit-config.yaml | 2 +- Install.ps1 | 2 +- PRETTY_GOOD_PRACTICES.md | 5 +- README.md | 2 +- benchmarks/block_ref.py | 6 +- benchmarks/block_store.py | 3 +- benchmarks/blockchains.py | 2 +- benchmarks/coin_store.py | 11 +- benchmarks/mempool-long-lived.py | 9 +- benchmarks/mempool.py | 21 +- benchmarks/streamable.py | 48 +- benchmarks/utils.py | 5 +- .../blockchain/blockchain_test_utils.py | 10 +- .../_tests/blockchain/test_augmented_chain.py | 12 +- chia/_tests/blockchain/test_blockchain.py | 67 +-- .../test_blockchain_transactions.py | 31 +- chia/_tests/blockchain/test_build_chains.py | 24 +- .../blockchain/test_get_block_generator.py | 10 +- .../blockchain/test_lookup_fork_chain.py | 7 +- chia/_tests/build-init-files.py | 5 +- chia/_tests/build-job-matrix.py | 18 +- chia/_tests/check_sql_statements.py | 5 +- chia/_tests/clvm/coin_store.py | 7 +- chia/_tests/clvm/test_curry_and_treehash.py | 4 +- chia/_tests/clvm/test_puzzle_drivers.py | 6 +- chia/_tests/clvm/test_puzzles.py | 8 +- chia/_tests/clvm/test_singletons.py | 6 +- chia/_tests/cmds/cmd_test_utils.py | 67 +-- chia/_tests/cmds/conftest.py | 4 +- chia/_tests/cmds/test_cmd_framework.py | 7 +- chia/_tests/cmds/test_daemon.py | 6 +- chia/_tests/cmds/test_farm_cmd.py | 3 +- chia/_tests/cmds/test_show.py | 10 +- chia/_tests/cmds/test_tx_config_args.py | 3 +- chia/_tests/cmds/wallet/test_coins.py | 12 +- chia/_tests/cmds/wallet/test_dao.py | 32 +- chia/_tests/cmds/wallet/test_did.py | 38 +- chia/_tests/cmds/wallet/test_nft.py | 24 +- chia/_tests/cmds/wallet/test_notifications.py | 10 +- chia/_tests/cmds/wallet/test_tx_decorators.py | 4 +- chia/_tests/cmds/wallet/test_vcs.py | 24 +- chia/_tests/cmds/wallet/test_wallet.py | 76 +-- chia/_tests/cmds/wallet/test_wallet_check.py | 12 +- chia/_tests/conftest.py | 47 +- chia/_tests/connection_utils.py | 11 +- 
chia/_tests/core/cmds/test_keys.py | 10 +- chia/_tests/core/cmds/test_wallet.py | 6 +- .../core/consensus/test_block_creation.py | 8 +- chia/_tests/core/custom_types/test_coin.py | 4 +- .../core/custom_types/test_spend_bundle.py | 7 +- chia/_tests/core/daemon/test_daemon.py | 106 ++-- .../_tests/core/daemon/test_keychain_proxy.py | 3 +- chia/_tests/core/data_layer/conftest.py | 7 +- chia/_tests/core/data_layer/test_data_cli.py | 3 +- .../_tests/core/data_layer/test_data_layer.py | 6 +- .../core/data_layer/test_data_layer_util.py | 9 +- chia/_tests/core/data_layer/test_data_rpc.py | 83 ++-- .../_tests/core/data_layer/test_data_store.py | 57 +-- .../core/data_layer/test_data_store_schema.py | 8 +- chia/_tests/core/data_layer/util.py | 17 +- chia/_tests/core/farmer/test_farmer_api.py | 3 +- .../full_node/full_sync/test_full_sync.py | 9 +- chia/_tests/core/full_node/ram_db.py | 4 +- .../core/full_node/stores/test_block_store.py | 4 +- .../core/full_node/stores/test_coin_store.py | 48 +- .../full_node/stores/test_full_node_store.py | 17 +- .../core/full_node/stores/test_hint_store.py | 3 +- chia/_tests/core/full_node/test_conditions.py | 10 +- chia/_tests/core/full_node/test_full_node.py | 35 +- .../core/full_node/test_generator_tools.py | 4 +- .../core/full_node/test_hint_management.py | 4 +- .../_tests/core/full_node/test_performance.py | 3 +- .../full_node/test_tx_processing_queue.py | 6 +- chia/_tests/core/make_block_generator.py | 10 +- chia/_tests/core/mempool/test_mempool.py | 166 +++---- .../core/mempool/test_mempool_fee_protocol.py | 6 +- .../core/mempool/test_mempool_item_queries.py | 4 +- .../core/mempool/test_mempool_manager.py | 85 ++-- .../core/mempool/test_mempool_performance.py | 4 +- .../mempool/test_singleton_fast_forward.py | 22 +- chia/_tests/core/server/serve.py | 8 +- chia/_tests/core/server/test_capabilities.py | 6 +- chia/_tests/core/server/test_dos.py | 7 +- chia/_tests/core/server/test_loop.py | 13 +- .../_tests/core/server/test_node_discovery.py | 3 +- chia/_tests/core/server/test_rate_limits.py | 3 +- chia/_tests/core/server/test_server.py | 10 +- chia/_tests/core/services/test_services.py | 6 +- chia/_tests/core/ssl/test_ssl.py | 6 +- chia/_tests/core/test_cost_calculation.py | 3 +- chia/_tests/core/test_db_conversion.py | 3 +- chia/_tests/core/test_db_validation.py | 5 +- chia/_tests/core/test_farmer_harvester_rpc.py | 13 +- chia/_tests/core/test_filter.py | 4 +- chia/_tests/core/test_full_node_rpc.py | 16 +- chia/_tests/core/test_merkle_set.py | 12 +- chia/_tests/core/test_program.py | 4 +- chia/_tests/core/test_rpc_util.py | 3 +- chia/_tests/core/test_seeder.py | 12 +- chia/_tests/core/util/test_config.py | 26 +- chia/_tests/core/util/test_jsonify.py | 16 +- chia/_tests/core/util/test_keychain.py | 6 +- chia/_tests/core/util/test_keyring_wrapper.py | 3 +- chia/_tests/core/util/test_log_exceptions.py | 6 +- chia/_tests/core/util/test_streamable.py | 72 +-- chia/_tests/db/test_db_wrapper.py | 12 +- chia/_tests/environments/common.py | 4 +- chia/_tests/environments/full_node.py | 4 +- chia/_tests/environments/wallet.py | 56 +-- chia/_tests/farmer_harvester/test_farmer.py | 52 +- .../farmer_harvester/test_farmer_harvester.py | 22 +- .../test_filter_prefix_bits.py | 11 +- .../test_third_party_harvesters.py | 20 +- .../test_fee_estimation_integration.py | 11 +- .../fee_estimation/test_fee_estimation_rpc.py | 54 +-- .../test_fee_estimation_unit_tests.py | 13 +- .../test_mempoolitem_height_added.py | 6 +- chia/_tests/generator/test_compression.py | 4 +- 
chia/_tests/generator/test_rom.py | 6 +- chia/_tests/plot_sync/test_delta.py | 5 +- chia/_tests/plot_sync/test_plot_sync.py | 47 +- chia/_tests/plot_sync/test_receiver.py | 12 +- chia/_tests/plot_sync/test_sync_simulated.py | 53 +- chia/_tests/plot_sync/util.py | 3 +- chia/_tests/plotting/test_plot_manager.py | 23 +- chia/_tests/plotting/util.py | 3 +- .../pools/test_pool_puzzles_lifecycle.py | 9 +- chia/_tests/pools/test_pool_rpc.py | 35 +- chia/_tests/pools/test_pool_wallet.py | 14 +- chia/_tests/pools/test_wallet_pool_store.py | 6 +- chia/_tests/process_junit.py | 24 +- chia/_tests/rpc/test_rpc_client.py | 5 +- chia/_tests/simulation/test_simulation.py | 6 +- chia/_tests/simulation/test_simulator.py | 18 +- .../_tests/simulation/test_start_simulator.py | 7 +- chia/_tests/timelord/test_new_peak.py | 20 +- chia/_tests/tools/test_run_block.py | 3 +- chia/_tests/tools/test_virtual_project.py | 14 +- chia/_tests/util/benchmarks.py | 3 +- chia/_tests/util/blockchain.py | 21 +- chia/_tests/util/blockchain_mock.py | 30 +- .../util/build_network_protocol_files.py | 7 +- chia/_tests/util/db_connection.py | 5 +- chia/_tests/util/full_sync.py | 9 +- chia/_tests/util/gen_ssl_certs.py | 2 +- chia/_tests/util/generator_tools_testing.py | 8 +- chia/_tests/util/key_tool.py | 5 +- chia/_tests/util/misc.py | 65 +-- chia/_tests/util/protocol_messages_json.py | 222 ++++----- chia/_tests/util/run_block.py | 30 +- chia/_tests/util/setup_nodes.py | 41 +- chia/_tests/util/split_managers.py | 11 +- chia/_tests/util/temp_file.py | 2 +- chia/_tests/util/test_action_scope.py | 3 +- chia/_tests/util/test_async_pool.py | 16 +- chia/_tests/util/test_build_job_matrix.py | 5 +- chia/_tests/util/test_condition_tools.py | 6 +- chia/_tests/util/test_config.py | 10 +- chia/_tests/util/test_dump_keyring.py | 2 +- chia/_tests/util/test_full_block_utils.py | 17 +- chia/_tests/util/test_misc.py | 57 +-- chia/_tests/util/test_network.py | 4 +- .../util/test_network_protocol_files.py | 3 +- .../_tests/util/test_network_protocol_test.py | 6 +- chia/_tests/util/test_paginator.py | 7 +- chia/_tests/util/test_priority_mutex.py | 18 +- chia/_tests/util/test_recursive_replace.py | 4 +- chia/_tests/util/test_testnet_overrides.py | 6 +- chia/_tests/util/test_timing.py | 2 +- chia/_tests/util/test_trusted_peer.py | 4 +- chia/_tests/util/time_out_assert.py | 8 +- .../wallet/cat_wallet/test_cat_lifecycle.py | 20 +- .../wallet/cat_wallet/test_offer_lifecycle.py | 22 +- chia/_tests/wallet/cat_wallet/test_trades.py | 14 +- .../clawback/test_clawback_decorator.py | 8 +- .../clawback/test_clawback_lifecycle.py | 8 +- .../wallet/clawback/test_clawback_metadata.py | 3 +- chia/_tests/wallet/conftest.py | 11 +- .../_tests/wallet/dao_wallet/test_dao_clvm.py | 18 +- .../wallet/dao_wallet/test_dao_wallets.py | 10 +- .../wallet/db_wallet/test_db_graftroot.py | 14 +- .../_tests/wallet/db_wallet/test_dl_offers.py | 6 +- .../_tests/wallet/db_wallet/test_dl_wallet.py | 4 +- chia/_tests/wallet/did_wallet/test_did.py | 7 +- .../wallet/nft_wallet/test_nft_1_offers.py | 5 +- .../wallet/nft_wallet/test_nft_lifecycle.py | 5 +- .../wallet/nft_wallet/test_nft_offers.py | 20 +- .../wallet/nft_wallet/test_nft_puzzles.py | 6 +- .../wallet/nft_wallet/test_nft_wallet.py | 18 +- chia/_tests/wallet/rpc/test_wallet_rpc.py | 36 +- .../simple_sync/test_simple_sync_protocol.py | 9 +- chia/_tests/wallet/sync/test_wallet_sync.py | 49 +- chia/_tests/wallet/test_address_type.py | 40 +- chia/_tests/wallet/test_clvm_streamable.py | 8 +- chia/_tests/wallet/test_coin_selection.py | 
67 ++- chia/_tests/wallet/test_conditions.py | 18 +- .../_tests/wallet/test_new_wallet_protocol.py | 40 +- chia/_tests/wallet/test_nft_store.py | 3 +- .../wallet/test_offer_parsing_performance.py | 2 +- chia/_tests/wallet/test_puzzle_store.py | 5 +- chia/_tests/wallet/test_signer_protocol.py | 20 +- .../wallet/test_singleton_lifecycle_fast.py | 50 +- chia/_tests/wallet/test_transaction_store.py | 16 +- chia/_tests/wallet/test_util.py | 6 +- chia/_tests/wallet/test_wallet.py | 18 +- .../_tests/wallet/test_wallet_action_scope.py | 12 +- chia/_tests/wallet/test_wallet_blockchain.py | 6 +- chia/_tests/wallet/test_wallet_coin_store.py | 66 +-- chia/_tests/wallet/test_wallet_node.py | 16 +- chia/_tests/wallet/test_wallet_retry.py | 4 +- .../wallet/test_wallet_state_manager.py | 4 +- chia/_tests/wallet/test_wallet_utils.py | 9 +- .../wallet/vc_wallet/test_cr_outer_puzzle.py | 4 +- .../wallet/vc_wallet/test_vc_lifecycle.py | 16 +- .../_tests/wallet/vc_wallet/test_vc_wallet.py | 5 +- chia/_tests/wallet/wallet_block_tools.py | 36 +- chia/_tests/weight_proof/test_weight_proof.py | 58 +-- chia/clvm/spend_sim.py | 81 ++-- chia/cmds/beta.py | 4 +- chia/cmds/beta_funcs.py | 8 +- chia/cmds/check_wallet_db.py | 43 +- chia/cmds/cmd_classes.py | 58 +-- chia/cmds/cmds_util.py | 29 +- chia/cmds/coin_funcs.py | 11 +- chia/cmds/coins.py | 7 +- chia/cmds/dao.py | 23 +- chia/cmds/dao_funcs.py | 22 +- chia/cmds/data.py | 13 +- chia/cmds/data_funcs.py | 41 +- chia/cmds/db_backup_func.py | 4 +- chia/cmds/db_upgrade_func.py | 6 +- chia/cmds/db_validate_func.py | 4 +- chia/cmds/farm_funcs.py | 16 +- chia/cmds/init_funcs.py | 12 +- chia/cmds/installers.py | 6 +- chia/cmds/keys.py | 10 +- chia/cmds/keys_funcs.py | 54 +-- chia/cmds/passphrase_funcs.py | 8 +- chia/cmds/peer_funcs.py | 8 +- chia/cmds/plotnft_funcs.py | 23 +- chia/cmds/rpc.py | 26 +- chia/cmds/show_funcs.py | 8 +- chia/cmds/signer.py | 29 +- chia/cmds/sim_funcs.py | 14 +- chia/cmds/start_funcs.py | 6 +- chia/cmds/stop.py | 4 +- chia/cmds/units.py | 4 +- chia/cmds/wallet.py | 41 +- chia/cmds/wallet_funcs.py | 117 ++--- chia/consensus/block_body_validation.py | 39 +- chia/consensus/block_creation.py | 29 +- chia/consensus/block_header_validation.py | 6 +- chia/consensus/block_root_validation.py | 10 +- chia/consensus/blockchain.py | 70 +-- chia/consensus/blockchain_interface.py | 20 +- chia/consensus/default_constants.py | 4 +- chia/consensus/difficulty_adjustment.py | 10 +- chia/consensus/find_fork_point.py | 10 +- chia/consensus/full_block_to_block_record.py | 8 +- chia/consensus/get_block_challenge.py | 4 +- chia/consensus/get_block_generator.py | 7 +- chia/consensus/multiprocess_validation.py | 17 +- chia/consensus/vdf_info_computation.py | 6 +- chia/daemon/client.py | 21 +- chia/daemon/keychain_proxy.py | 14 +- chia/daemon/keychain_server.py | 36 +- chia/daemon/server.py | 155 +++--- chia/data_layer/data_layer.py | 119 ++--- chia/data_layer/data_layer_errors.py | 6 +- chia/data_layer/data_layer_server.py | 4 +- chia/data_layer/data_layer_util.py | 142 +++--- chia/data_layer/data_layer_wallet.py | 92 ++-- chia/data_layer/data_store.py | 127 ++--- chia/data_layer/dl_wallet_store.py | 20 +- chia/data_layer/download_data.py | 4 +- chia/data_layer/s3_plugin_service.py | 16 +- chia/data_layer/util/plugin.py | 3 +- chia/farmer/farmer.py | 75 +-- chia/farmer/farmer_api.py | 14 +- chia/full_node/block_height_map.py | 10 +- chia/full_node/block_store.py | 56 +-- chia/full_node/coin_store.py | 73 +-- chia/full_node/fee_estimate.py | 4 +- 
chia/full_node/fee_estimation.py | 3 +- chia/full_node/fee_history.py | 11 +- chia/full_node/fee_tracker.py | 42 +- chia/full_node/full_node.py | 120 ++--- chia/full_node/full_node_api.py | 76 +-- chia/full_node/full_node_store.py | 82 ++-- chia/full_node/hint_management.py | 8 +- chia/full_node/hint_store.py | 17 +- chia/full_node/mempool.py | 39 +- chia/full_node/mempool_check_conditions.py | 12 +- chia/full_node/mempool_manager.py | 77 +-- chia/full_node/pending_tx_cache.py | 14 +- chia/full_node/subscriptions.py | 47 +- chia/full_node/sync_store.py | 15 +- chia/full_node/tx_processing_queue.py | 6 +- chia/full_node/weight_proof.py | 134 ++--- chia/harvester/harvester.py | 17 +- chia/harvester/harvester_api.py | 14 +- chia/introducer/introducer.py | 5 +- chia/legacy/keyring.py | 6 +- chia/plot_sync/delta.py | 16 +- chia/plot_sync/receiver.py | 23 +- chia/plot_sync/sender.py | 19 +- chia/plotters/bladebit.py | 12 +- chia/plotters/chiapos.py | 4 +- chia/plotters/madmax.py | 6 +- chia/plotters/plotters.py | 12 +- chia/plotters/plotters_util.py | 3 +- chia/plotting/cache.py | 15 +- chia/plotting/check_plots.py | 6 +- chia/plotting/create_plots.py | 12 +- chia/plotting/manager.py | 38 +- chia/plotting/util.py | 30 +- chia/pools/pool_config.py | 12 +- chia/pools/pool_puzzles.py | 16 +- chia/pools/pool_wallet.py | 64 +-- chia/pools/pool_wallet_info.py | 4 +- chia/protocols/full_node_protocol.py | 6 +- chia/protocols/harvester_protocol.py | 24 +- chia/protocols/introducer_protocol.py | 3 +- chia/protocols/shared_protocol.py | 6 +- chia/protocols/timelord_protocol.py | 4 +- chia/protocols/wallet_protocol.py | 60 +-- chia/rpc/crawler_rpc_api.py | 12 +- chia/rpc/data_layer_rpc_api.py | 82 ++-- chia/rpc/data_layer_rpc_client.py | 82 ++-- chia/rpc/data_layer_rpc_util.py | 12 +- chia/rpc/farmer_rpc_api.py | 52 +- chia/rpc/farmer_rpc_client.py | 28 +- chia/rpc/full_node_rpc_api.py | 92 ++-- chia/rpc/full_node_rpc_client.py | 60 +-- chia/rpc/harvester_rpc_api.py | 22 +- chia/rpc/harvester_rpc_client.py | 12 +- chia/rpc/rpc_client.py | 25 +- chia/rpc/rpc_server.py | 53 +- chia/rpc/timelord_rpc_api.py | 6 +- chia/rpc/util.py | 21 +- chia/rpc/wallet_request_types.py | 78 +-- chia/rpc/wallet_rpc_api.py | 458 +++++++++--------- chia/rpc/wallet_rpc_client.py | 317 ++++++------ chia/seeder/crawl_store.py | 14 +- chia/seeder/crawler.py | 42 +- chia/seeder/dns_server.py | 29 +- chia/seeder/start_crawler.py | 4 +- chia/server/address_manager.py | 34 +- chia/server/address_manager_store.py | 34 +- chia/server/capabilities.py | 6 +- chia/server/chia_policy.py | 13 +- chia/server/introducer_peers.py | 6 +- chia/server/node_discovery.py | 36 +- chia/server/rate_limit_numbers.py | 16 +- chia/server/rate_limits.py | 3 +- chia/server/server.py | 37 +- chia/server/signal_handlers.py | 5 +- chia/server/ssl_context.py | 10 +- chia/server/start_data_layer.py | 12 +- chia/server/start_farmer.py | 6 +- chia/server/start_full_node.py | 8 +- chia/server/start_harvester.py | 6 +- chia/server/start_introducer.py | 4 +- chia/server/start_service.py | 32 +- chia/server/start_timelord.py | 4 +- chia/server/start_wallet.py | 4 +- chia/server/upnp.py | 7 +- chia/server/ws_connection.py | 23 +- chia/simulator/block_tools.py | 111 ++--- chia/simulator/full_node_simulator.py | 57 +-- chia/simulator/setup_services.py | 11 +- chia/simulator/simulator_full_node_rpc_api.py | 26 +- .../simulator_full_node_rpc_client.py | 8 +- chia/simulator/simulator_test_tools.py | 15 +- chia/simulator/socket.py | 3 +- chia/simulator/ssl_certs.py | 10 +- 
chia/simulator/ssl_certs_1.py | 6 +- chia/simulator/ssl_certs_10.py | 6 +- chia/simulator/ssl_certs_2.py | 6 +- chia/simulator/ssl_certs_3.py | 6 +- chia/simulator/ssl_certs_4.py | 6 +- chia/simulator/ssl_certs_5.py | 6 +- chia/simulator/ssl_certs_6.py | 6 +- chia/simulator/ssl_certs_7.py | 6 +- chia/simulator/ssl_certs_8.py | 6 +- chia/simulator/ssl_certs_9.py | 6 +- chia/simulator/start_simulator.py | 6 +- chia/simulator/wallet_tools.py | 36 +- chia/ssl/create_ssl.py | 22 +- chia/timelord/iters_from_block.py | 4 +- chia/timelord/timelord.py | 43 +- chia/timelord/timelord_launcher.py | 9 +- chia/timelord/timelord_state.py | 4 +- chia/types/block_protocol.py | 4 +- chia/types/blockchain_format/coin.py | 6 +- chia/types/blockchain_format/program.py | 22 +- chia/types/blockchain_format/tree_hash.py | 18 +- chia/types/coin_spend.py | 12 +- chia/types/condition_with_args.py | 3 +- chia/types/eligible_coin_spends.py | 25 +- chia/types/generator_types.py | 3 +- chia/types/internal_mempool_item.py | 3 +- chia/types/mempool_item.py | 14 +- chia/types/mempool_submission_status.py | 4 +- chia/types/transaction_queue_entry.py | 4 +- chia/types/unfinished_header_block.py | 4 +- chia/types/weight_proof.py | 11 +- chia/util/action_scope.py | 15 +- chia/util/api_decorators.py | 10 +- chia/util/async_pool.py | 5 +- chia/util/augmented_chain.py | 16 +- chia/util/batches.py | 5 +- chia/util/bech32m.py | 17 +- chia/util/beta_metrics.py | 6 +- chia/util/block_cache.py | 10 +- chia/util/check_fork_next_block.py | 5 +- chia/util/chia_logging.py | 12 +- chia/util/collection.py | 4 +- chia/util/condition_tools.py | 34 +- chia/util/config.py | 47 +- chia/util/db_wrapper.py | 13 +- chia/util/dump_keyring.py | 4 +- chia/util/errors.py | 6 +- chia/util/file_keyring.py | 27 +- chia/util/full_block_utils.py | 10 +- chia/util/generator_tools.py | 11 +- chia/util/keychain.py | 35 +- chia/util/keyring_wrapper.py | 4 +- chia/util/limited_semaphore.py | 2 +- chia/util/lock.py | 4 +- chia/util/log_exceptions.py | 5 +- chia/util/logging.py | 2 +- chia/util/math.py | 6 +- chia/util/network.py | 13 +- chia/util/paginator.py | 2 +- chia/util/permissions.py | 3 +- chia/util/pprint.py | 5 +- chia/util/prev_transaction_block.py | 4 +- chia/util/priority_mutex.py | 7 +- chia/util/profiler.py | 3 +- chia/util/service_groups.py | 4 +- chia/util/ssl_check.py | 20 +- chia/util/streamable.py | 78 ++- chia/util/task_timing.py | 15 +- chia/util/timing.py | 3 +- chia/util/vdf_prover.py | 4 +- chia/util/virtual_project_analysis.py | 54 +-- chia/util/ws_message.py | 12 +- chia/wallet/cat_wallet/cat_info.py | 6 +- chia/wallet/cat_wallet/cat_outer_puzzle.py | 6 +- chia/wallet/cat_wallet/cat_utils.py | 9 +- chia/wallet/cat_wallet/cat_wallet.py | 68 +-- chia/wallet/cat_wallet/dao_cat_info.py | 6 +- chia/wallet/cat_wallet/dao_cat_wallet.py | 32 +- chia/wallet/cat_wallet/lineage_store.py | 4 +- chia/wallet/coin_selection.py | 30 +- chia/wallet/conditions.py | 101 ++-- chia/wallet/dao_wallet/dao_info.py | 8 +- chia/wallet/dao_wallet/dao_utils.py | 25 +- chia/wallet/dao_wallet/dao_wallet.py | 46 +- chia/wallet/db_wallet/db_wallet_puzzles.py | 13 +- chia/wallet/derive_keys.py | 20 +- chia/wallet/did_wallet/did_info.py | 6 +- chia/wallet/did_wallet/did_wallet.py | 46 +- chia/wallet/did_wallet/did_wallet_puzzles.py | 13 +- chia/wallet/lineage_proof.py | 8 +- .../nft_wallet/metadata_outer_puzzle.py | 4 +- chia/wallet/nft_wallet/nft_info.py | 8 +- chia/wallet/nft_wallet/nft_puzzles.py | 22 +- chia/wallet/nft_wallet/nft_wallet.py | 148 +++--- 
.../nft_wallet/ownership_outer_puzzle.py | 4 +- .../nft_wallet/singleton_outer_puzzle.py | 4 +- .../nft_wallet/transfer_program_puzzle.py | 4 +- chia/wallet/nft_wallet/uncurry_nft.py | 4 +- chia/wallet/notification_manager.py | 10 +- chia/wallet/notification_store.py | 12 +- chia/wallet/outer_puzzles.py | 4 +- chia/wallet/payment.py | 9 +- chia/wallet/puzzle_drivers.py | 8 +- chia/wallet/puzzles/clawback/drivers.py | 8 +- .../puzzles/clawback/puzzle_decorator.py | 14 +- chia/wallet/puzzles/load_clvm.py | 5 +- chia/wallet/puzzles/puzzle_utils.py | 14 +- chia/wallet/puzzles/singleton_top_layer.py | 11 +- .../puzzles/singleton_top_layer_v1_1.py | 11 +- chia/wallet/puzzles/tails.py | 50 +- chia/wallet/signer_protocol.py | 15 +- chia/wallet/singleton.py | 4 +- chia/wallet/trade_manager.py | 138 +++--- chia/wallet/trade_record.py | 10 +- chia/wallet/trading/offer.py | 182 +++---- chia/wallet/trading/trade_store.py | 26 +- chia/wallet/transaction_record.py | 29 +- chia/wallet/util/address_type.py | 8 +- chia/wallet/util/blind_signer_tl.py | 17 +- chia/wallet/util/clvm_streamable.py | 30 +- chia/wallet/util/compute_hints.py | 6 +- chia/wallet/util/compute_memos.py | 10 +- chia/wallet/util/curry_and_treehash.py | 5 +- chia/wallet/util/debug_spend_bundle.py | 6 +- chia/wallet/util/merkle_tree.py | 16 +- chia/wallet/util/merkle_utils.py | 16 +- chia/wallet/util/new_peak_queue.py | 6 +- chia/wallet/util/peer_request_cache.py | 16 +- chia/wallet/util/puzzle_compression.py | 3 +- chia/wallet/util/puzzle_decorator.py | 20 +- chia/wallet/util/query_filter.py | 19 +- chia/wallet/util/tx_config.py | 24 +- chia/wallet/util/wallet_sync_utils.py | 32 +- chia/wallet/vc_wallet/cr_cat_drivers.py | 55 +-- chia/wallet/vc_wallet/cr_cat_wallet.py | 72 +-- chia/wallet/vc_wallet/cr_outer_puzzle.py | 8 +- chia/wallet/vc_wallet/vc_drivers.py | 30 +- chia/wallet/vc_wallet/vc_store.py | 18 +- chia/wallet/vc_wallet/vc_wallet.py | 66 +-- chia/wallet/wallet.py | 86 ++-- chia/wallet/wallet_action_scope.py | 27 +- chia/wallet/wallet_blockchain.py | 14 +- chia/wallet/wallet_coin_record.py | 4 +- chia/wallet/wallet_coin_store.py | 20 +- chia/wallet/wallet_info.py | 3 +- chia/wallet/wallet_interested_store.py | 10 +- chia/wallet/wallet_nft_store.py | 12 +- chia/wallet/wallet_node.py | 105 ++-- chia/wallet/wallet_pool_store.py | 3 +- chia/wallet/wallet_protocol.py | 18 +- chia/wallet/wallet_puzzle_store.py | 8 +- chia/wallet/wallet_retry_store.py | 4 +- chia/wallet/wallet_singleton_store.py | 14 +- chia/wallet/wallet_spend_bundle.py | 8 +- chia/wallet/wallet_state_manager.py | 209 ++++---- chia/wallet/wallet_transaction_store.py | 32 +- chia/wallet/wallet_user_store.py | 4 +- chia/wallet/wallet_weight_proof_handler.py | 4 +- install.sh | 6 +- manage-mypy.py | 10 +- poetry.lock | 23 +- pyproject.toml | 4 +- tools/analyze-chain.py | 14 +- tools/analyze_memory_profile.py | 10 +- tools/chialispp.py | 28 +- tools/cpu_utilization.py | 5 +- tools/generate_chain.py | 13 +- tools/manage_clvm.py | 24 +- 549 files changed, 6044 insertions(+), 6237 deletions(-) diff --git a/.github/workflows/check_wheel_availability.yaml b/.github/workflows/check_wheel_availability.yaml index b4f84f8d46fc..6924efd1d88f 100644 --- a/.github/workflows/check_wheel_availability.yaml +++ b/.github/workflows/check_wheel_availability.yaml @@ -43,13 +43,8 @@ jobs: matrix: arm - name: Intel matrix: intel - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] exclude: - - os: - matrix: macos - arch: - matrix: arm - 
python-version: "3.8" - os: matrix: windows arch: diff --git a/.github/workflows/pre-commit.yml b/.github/workflows/pre-commit.yml index a6a6aad61482..becd6fb7d295 100644 --- a/.github/workflows/pre-commit.yml +++ b/.github/workflows/pre-commit.yml @@ -41,7 +41,6 @@ jobs: - name: Intel matrix: intel python: - - major_dot_minor: "3.8" - major_dot_minor: "3.9" - major_dot_minor: "3.10" - major_dot_minor: "3.11" diff --git a/.github/workflows/test-install-scripts.yml b/.github/workflows/test-install-scripts.yml index 8ca09d811cd3..bda58f04f054 100644 --- a/.github/workflows/test-install-scripts.yml +++ b/.github/workflows/test-install-scripts.yml @@ -220,7 +220,12 @@ jobs: apt-get install --yes software-properties-common add-apt-repository --yes ppa:git-core/ppa apt-get --yes update - apt-get install --yes git lsb-release sudo python3-venv + apt-get install --yes git lsb-release sudo + MINIMUM=3.9 + if ! apt satisfy --yes "python3-venv (>= ${MINIMUM})" + then + apt-get install --yes python${MINIMUM}-venv + fi - name: Add safe git directory run: git config --global --add safe.directory "$GITHUB_WORKSPACE" diff --git a/.github/workflows/test-single.yml b/.github/workflows/test-single.yml index 2075568d6ea5..dc730e33b3dd 100644 --- a/.github/workflows/test-single.yml +++ b/.github/workflows/test-single.yml @@ -66,15 +66,6 @@ jobs: arch: - matrix: ${{ inputs.arch }} python: - - name: "3.8" - file_name: "3.8" - action: "3.8" - apt: "3.8" - install_sh: "3.8" - matrix: "3.8" - exclude_from: - limited: True - main: True - name: "3.9" file_name: "3.9" action: "3.9" @@ -109,10 +100,6 @@ jobs: limited: True main: True exclude: - - os: - matrix: macos - python: - matrix: "3.8" - arch: matrix: arm configuration: diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index 764f3cbb83b0..fa0f3c89de4f 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -56,7 +56,6 @@ jobs: - name: Intel matrix: intel python: - - major_dot_minor: "3.8" - major_dot_minor: "3.9" - major_dot_minor: "3.10" - major_dot_minor: "3.11" @@ -68,12 +67,6 @@ jobs: python manage-mypy.py build-mypy-ini mypy exclude: - - os: - matrix: macos - arch: - matrix: arm - python: - major_dot_minor: "3.8" - os: matrix: windows arch: @@ -174,7 +167,7 @@ jobs: - check strategy: matrix: - python-version: [3.8] + python-version: [3.10] os: [ubuntu-latest] steps: diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 7e7afd4e9dc8..063bfe9dc4e0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: hooks: - id: pyupgrade name: pyupgrade - entry: ./activated.py pyupgrade --py38-plus --keep-runtime-typing + entry: ./activated.py pyupgrade --py39-plus --keep-runtime-typing language: system types: [python] - repo: local diff --git a/Install.ps1 b/Install.ps1 index 74bb37245f41..051f48cd3198 100644 --- a/Install.ps1 +++ b/Install.ps1 @@ -48,7 +48,7 @@ if ($null -eq (Get-Command py -ErrorAction SilentlyContinue)) Exit 1 } -$supportedPythonVersions = "3.12", "3.11", "3.10", "3.9", "3.8" +$supportedPythonVersions = "3.12", "3.11", "3.10", "3.9" if ("$env:INSTALL_PYTHON_VERSION" -ne "") { $pythonVersion = $env:INSTALL_PYTHON_VERSION diff --git a/PRETTY_GOOD_PRACTICES.md b/PRETTY_GOOD_PRACTICES.md index be0e92541560..e08c5f1d15ad 100644 --- a/PRETTY_GOOD_PRACTICES.md +++ b/PRETTY_GOOD_PRACTICES.md @@ -197,7 +197,7 @@ How about a couple examples. 
```python import json from dataclasses import dataclass -from typing import Type, TypeVar +from typing import TypeVar _T_Coin = TypeVar("_T_Coin", bound="Coin") @@ -281,7 +281,6 @@ If it is hard to deal with the knock on complaints from mypy triggered by the hi ### Basic hints ```python -from typing import List def sum_bigger_values(values: List[int], minimum: int) -> int: return sum(value for value in values if value > minimum) @@ -497,7 +496,7 @@ Without writing an implementation, let's see what part of a cache leveraging gen ```python from dataclasses import dataclass, field -from typing import Dict, Generic, Optional, TypeVar +from typing import Generic, Optional, TypeVar KT = TypeVar("KT") VT = TypeVar("VT") diff --git a/README.md b/README.md index bcbec273f809..6655576b72e7 100644 --- a/README.md +++ b/README.md @@ -20,7 +20,7 @@ Chia is a modern cryptocurrency built from scratch, designed to be efficient, de Please check out the [Chia website][link-chia], the [Intro to Chia][link-intro], and [FAQ][link-faq] for information on this project. -Python 3.8.1+ is required. Make sure your default python version is >=3.8.1 by typing `python3`. +Python 3.9+ is required. Make sure your default python version is >=3.9 by typing `python3`. If you are behind a NAT, it can be difficult for peers outside your subnet to reach you when they start up. You can enable [UPnP][link-upnp] on your router or add a NAT (for IPv4 but not IPv6) and firewall rules to allow TCP port 8444 access to your peer. diff --git a/benchmarks/block_ref.py b/benchmarks/block_ref.py index 86994aff9207..640c668773c3 100644 --- a/benchmarks/block_ref.py +++ b/benchmarks/block_ref.py @@ -6,7 +6,7 @@ from dataclasses import dataclass from pathlib import Path from time import monotonic -from typing import List, Optional +from typing import Optional import aiosqlite import click @@ -38,10 +38,10 @@ class BlockInfo: prev_header_hash: bytes32 transactions_generator: Optional[SerializedProgram] - transactions_generator_ref_list: List[uint32] + transactions_generator_ref_list: list[uint32] -def random_refs() -> List[uint32]: +def random_refs() -> list[uint32]: ret = random.sample(transaction_block_heights, DEFAULT_CONSTANTS.MAX_GENERATOR_REF_LIST_SIZE) random.shuffle(ret) return [uint32(i) for i in ret] diff --git a/benchmarks/block_store.py b/benchmarks/block_store.py index 530037ae1cd3..bccf69323617 100644 --- a/benchmarks/block_store.py +++ b/benchmarks/block_store.py @@ -6,7 +6,6 @@ import sys from pathlib import Path from time import monotonic -from typing import List from benchmarks.utils import setup_db from chia._tests.util.benchmarks import ( @@ -72,7 +71,7 @@ async def run_add_block_benchmark(version: int) -> None: if verbose: print("profiling add_full_block", end="") - tx_block_heights: List[uint32] = [] + tx_block_heights: list[uint32] = [] for height in range(block_height, block_height + NUM_ITERS): is_transaction = transaction_block_counter == 0 diff --git a/benchmarks/blockchains.py b/benchmarks/blockchains.py index 2ae42406119c..afcbbd05887d 100644 --- a/benchmarks/blockchains.py +++ b/benchmarks/blockchains.py @@ -3,9 +3,9 @@ import asyncio import cProfile import time +from collections.abc import Iterator from contextlib import contextmanager from subprocess import check_call -from typing import Iterator from chia._tests.util.blockchain import persistent_blocks from chia.simulator.block_tools import create_block_tools_async, test_constants diff --git a/benchmarks/coin_store.py b/benchmarks/coin_store.py index 
5b31ff92feb0..cec13c9f22a3 100644 --- a/benchmarks/coin_store.py +++ b/benchmarks/coin_store.py @@ -6,7 +6,6 @@ import sys from pathlib import Path from time import monotonic -from typing import List, Tuple from benchmarks.utils import setup_db from chia._tests.util.benchmarks import rand_hash, rewards @@ -28,9 +27,9 @@ def make_coin() -> Coin: return Coin(rand_hash(), rand_hash(), uint64(1)) -def make_coins(num: int) -> Tuple[List[Coin], List[bytes32]]: - additions: List[Coin] = [] - hashes: List[bytes32] = [] +def make_coins(num: int) -> tuple[list[Coin], list[bytes32]]: + additions: list[Coin] = [] + hashes: list[bytes32] = [] for i in range(num): c = make_coin() additions.append(c) @@ -48,8 +47,8 @@ async def run_new_block_benchmark(version: int) -> None: async with setup_db("coin-store-benchmark.db", version) as db_wrapper: coin_store = await CoinStore.create(db_wrapper) - all_unspent: List[bytes32] = [] - all_coins: List[bytes32] = [] + all_unspent: list[bytes32] = [] + all_coins: list[bytes32] = [] block_height = 1 timestamp = 1631794488 diff --git a/benchmarks/mempool-long-lived.py b/benchmarks/mempool-long-lived.py index 564a99812cd0..877fa8651971 100644 --- a/benchmarks/mempool-long-lived.py +++ b/benchmarks/mempool-long-lived.py @@ -1,9 +1,10 @@ from __future__ import annotations import asyncio +from collections.abc import Collection from dataclasses import dataclass from time import monotonic -from typing import Collection, Dict, List, Optional +from typing import Optional from chia_rs import G2Element from clvm.casts import int_to_bytes @@ -79,10 +80,10 @@ def fake_block_record(block_height: uint32, timestamp: uint64) -> BenchBlockReco async def run_mempool_benchmark() -> None: - coin_records: Dict[bytes32, CoinRecord] = {} + coin_records: dict[bytes32, CoinRecord] = {} - async def get_coin_record(coin_ids: Collection[bytes32]) -> List[CoinRecord]: - ret: List[CoinRecord] = [] + async def get_coin_record(coin_ids: Collection[bytes32]) -> list[CoinRecord]: + ret: list[CoinRecord] = [] for name in coin_ids: r = coin_records.get(name) if r is not None: diff --git a/benchmarks/mempool.py b/benchmarks/mempool.py index c7dec604b711..335779000b24 100644 --- a/benchmarks/mempool.py +++ b/benchmarks/mempool.py @@ -2,11 +2,12 @@ import asyncio import cProfile +from collections.abc import Collection, Iterator from contextlib import contextmanager from dataclasses import dataclass from subprocess import check_call from time import monotonic -from typing import Collection, Dict, Iterator, List, Optional, Tuple +from typing import Optional from chia.consensus.coinbase import create_farmer_coin, create_pool_coin from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -77,10 +78,10 @@ def fake_block_record(block_height: uint32, timestamp: uint64) -> BenchBlockReco async def run_mempool_benchmark() -> None: - all_coins: Dict[bytes32, CoinRecord] = {} + all_coins: dict[bytes32, CoinRecord] = {} - async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]: - ret: List[CoinRecord] = [] + async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]: + ret: list[CoinRecord] = [] for name in coin_ids: r = all_coins.get(name) if r is not None: @@ -93,14 +94,14 @@ async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[Unspe wt = WalletTool(DEFAULT_CONSTANTS) - spend_bundles: List[List[SpendBundle]] = [] + spend_bundles: list[list[SpendBundle]] = [] # these spend the same coins as spend_bundles but with a higher fee - 
replacement_spend_bundles: List[List[SpendBundle]] = [] + replacement_spend_bundles: list[list[SpendBundle]] = [] # these spend the same coins as spend_bundles, but they are organized in # much larger bundles - large_spend_bundles: List[List[SpendBundle]] = [] + large_spend_bundles: list[list[SpendBundle]] = [] timestamp = uint64(1631794488) @@ -110,7 +111,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[Unspe for peer in range(NUM_PEERS): print(f" peer {peer}") print(" reward coins") - unspent: List[Coin] = [] + unspent: list[Coin] = [] for idx in range(NUM_ITERS): height = uint32(height + 1) @@ -170,7 +171,7 @@ async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[Unspe rec = fake_block_record(height, timestamp) await mempool.new_peak(rec, None) - async def add_spend_bundles(spend_bundles: List[SpendBundle]) -> None: + async def add_spend_bundles(spend_bundles: list[SpendBundle]) -> None: for tx in spend_bundles: spend_bundle_id = tx.name() npc = await mempool.pre_validate_spendbundle(tx, spend_bundle_id) @@ -239,7 +240,7 @@ async def add_spend_bundles(spend_bundles: List[SpendBundle]) -> None: print(f" per call: {(stop - start) / 500 * 1000:0.2f}ms") print("\nProfiling new_peak() (optimized)") - blocks: List[Tuple[BenchBlockRecord, List[bytes32]]] = [] + blocks: list[tuple[BenchBlockRecord, list[bytes32]]] = [] for coin_id in all_coins.keys(): height = uint32(height + 1) timestamp = uint64(timestamp + 19) diff --git a/benchmarks/streamable.py b/benchmarks/streamable.py index 2d41af918145..386da56a6917 100644 --- a/benchmarks/streamable.py +++ b/benchmarks/streamable.py @@ -6,7 +6,7 @@ from enum import Enum from statistics import stdev from time import process_time as clock -from typing import Any, Callable, Dict, List, Optional, TextIO, Tuple, Type, Union +from typing import Any, Callable, Optional, TextIO, Union import click @@ -33,9 +33,9 @@ class BenchmarkInner(Streamable): @dataclass(frozen=True) class BenchmarkMiddle(Streamable): a: uint64 - b: List[bytes32] - c: Tuple[str, bool, uint8, List[bytes]] - d: Tuple[BenchmarkInner, BenchmarkInner] + b: list[bytes32] + c: tuple[str, bool, uint8, list[bytes]] + d: tuple[BenchmarkInner, BenchmarkInner] e: BenchmarkInner @@ -45,8 +45,8 @@ class BenchmarkClass(Streamable): a: Optional[BenchmarkMiddle] b: Optional[BenchmarkMiddle] c: BenchmarkMiddle - d: List[BenchmarkMiddle] - e: Tuple[BenchmarkMiddle, BenchmarkMiddle, BenchmarkMiddle] + d: list[BenchmarkMiddle] + e: tuple[BenchmarkMiddle, BenchmarkMiddle, BenchmarkMiddle] def get_random_inner() -> BenchmarkInner: @@ -55,9 +55,9 @@ def get_random_inner() -> BenchmarkInner: def get_random_middle() -> BenchmarkMiddle: a: uint64 = uint64(10) - b: List[bytes32] = [rand_hash() for _ in range(a)] - c: Tuple[str, bool, uint8, List[bytes]] = ("benchmark", False, uint8(1), [rand_bytes(a) for _ in range(a)]) - d: Tuple[BenchmarkInner, BenchmarkInner] = (get_random_inner(), get_random_inner()) + b: list[bytes32] = [rand_hash() for _ in range(a)] + c: tuple[str, bool, uint8, list[bytes]] = ("benchmark", False, uint8(1), [rand_bytes(a) for _ in range(a)]) + d: tuple[BenchmarkInner, BenchmarkInner] = (get_random_inner(), get_random_inner()) e: BenchmarkInner = get_random_inner() return BenchmarkMiddle(a, b, c, d, e) @@ -66,8 +66,8 @@ def get_random_benchmark_object() -> BenchmarkClass: a: Optional[BenchmarkMiddle] = None b: Optional[BenchmarkMiddle] = get_random_middle() c: BenchmarkMiddle = get_random_middle() - d: List[BenchmarkMiddle] = 
[get_random_middle() for _ in range(5)] - e: Tuple[BenchmarkMiddle, BenchmarkMiddle, BenchmarkMiddle] = ( + d: list[BenchmarkMiddle] = [get_random_middle() for _ in range(5)] + e: tuple[BenchmarkMiddle, BenchmarkMiddle, BenchmarkMiddle] = ( get_random_middle(), get_random_middle(), get_random_middle(), @@ -146,12 +146,12 @@ class ModeParameter: @dataclass class BenchmarkParameter: - data_class: Type[Any] + data_class: type[Any] object_creation_cb: Callable[[], Any] - mode_parameter: Dict[Mode, Optional[ModeParameter]] + mode_parameter: dict[Mode, Optional[ModeParameter]] -benchmark_parameter: Dict[Data, BenchmarkParameter] = { +benchmark_parameter: dict[Data, BenchmarkParameter] = { Data.benchmark: BenchmarkParameter( BenchmarkClass, get_random_benchmark_object, @@ -177,8 +177,8 @@ class BenchmarkParameter: } -def run_for_ms(cb: Callable[[], Any], ms_to_run: int = 100) -> List[int]: - us_iteration_results: List[int] = [] +def run_for_ms(cb: Callable[[], Any], ms_to_run: int = 100) -> list[int]: + us_iteration_results: list[int] = [] start = clock() while int((clock() - start) * 1000) < ms_to_run: start_iteration = clock() @@ -188,12 +188,12 @@ def run_for_ms(cb: Callable[[], Any], ms_to_run: int = 100) -> List[int]: return us_iteration_results -def calc_stdev_percent(iterations: List[int], avg: float) -> float: +def calc_stdev_percent(iterations: list[int], avg: float) -> float: deviation = 0 if len(iterations) < 2 else int(stdev(iterations) * 100) / 100 return int((deviation / avg * 100) * 100) / 100 -def pop_data(key: str, *, old: Dict[str, Any], new: Dict[str, Any]) -> Tuple[Any, Any]: +def pop_data(key: str, *, old: dict[str, Any], new: dict[str, Any]) -> tuple[Any, Any]: if key not in old: sys.exit(f"{key} missing in old") if key not in new: @@ -206,7 +206,7 @@ def print_compare_row(c0: str, c1: Union[str, float], c2: Union[str, float], c3: def compare_results( - old: Dict[str, Dict[str, Dict[str, Union[float, int]]]], new: Dict[str, Dict[str, Dict[str, Union[float, int]]]] + old: dict[str, dict[str, dict[str, Union[float, int]]]], new: dict[str, dict[str, dict[str, Union[float, int]]]] ) -> None: old_version, new_version = pop_data("version", old=old, new=new) if old_version != new_version: @@ -233,8 +233,8 @@ def compare_results( @click.option("-o", "--output", type=click.File("w"), help="Write the results to a file") @click.option("-c", "--compare", type=click.File("r"), help="Compare to the results from a file") def run(data: Data, mode: Mode, runs: int, ms: int, live: bool, output: TextIO, compare: TextIO) -> None: - results: Dict[Data, Dict[Mode, List[List[int]]]] = {} - bench_results: Dict[str, Any] = {"version": _version, "commit_hash": get_commit_hash()} + results: dict[Data, dict[Mode, list[list[int]]]] = {} + bench_results: dict[str, Any] = {"version": _version, "commit_hash": get_commit_hash()} for current_data, parameter in benchmark_parameter.items(): if data == Data.all or current_data == data: results[current_data] = {} @@ -253,12 +253,12 @@ def run(data: Data, mode: Mode, runs: int, ms: int, live: bool, output: TextIO, for current_mode, current_mode_parameter in parameter.mode_parameter.items(): results[current_data][current_mode] = [] if mode == Mode.all or current_mode == mode: - us_iteration_results: List[int] - all_results: List[List[int]] = results[current_data][current_mode] + us_iteration_results: list[int] + all_results: list[list[int]] = results[current_data][current_mode] obj = parameter.object_creation_cb() def get_bench_results() -> BenchmarkResults: - 
all_runtimes: List[int] = [x for inner in all_results for x in inner] + all_runtimes: list[int] = [x for inner in all_results for x in inner] total_iterations: int = len(all_runtimes) total_elapsed_us: int = sum(all_runtimes) avg_iterations: float = total_iterations / len(all_results) diff --git a/benchmarks/utils.py b/benchmarks/utils.py index fd8d9ae74cbe..759c526990f2 100644 --- a/benchmarks/utils.py +++ b/benchmarks/utils.py @@ -5,8 +5,9 @@ import os import subprocess import sys +from collections.abc import AsyncIterator from pathlib import Path -from typing import Any, AsyncIterator, Generic, Optional, Type, TypeVar, Union +from typing import Any, Generic, Optional, TypeVar, Union import click @@ -17,7 +18,7 @@ # Workaround to allow `Enum` with click.Choice: https://github.com/pallets/click/issues/605#issuecomment-901099036 class EnumType(click.Choice, Generic[_T_Enum]): - def __init__(self, enum: Type[_T_Enum], case_sensitive: bool = False) -> None: + def __init__(self, enum: type[_T_Enum], case_sensitive: bool = False) -> None: self.__enum = enum super().__init__(choices=[item.value for item in enum], case_sensitive=case_sensitive) diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py index db59b79224be..816922a8a039 100644 --- a/chia/_tests/blockchain/blockchain_test_utils.py +++ b/chia/_tests/blockchain/blockchain_test_utils.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from chia_rs import BLSCache @@ -76,7 +76,7 @@ async def _validate_and_add_block( else: # validate_signatures must be False in order to trigger add_block() to # validate the signature. - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( blockchain.constants, blockchain, [block], @@ -138,7 +138,7 @@ async def _validate_and_add_block( async def _validate_and_add_block_multi_error( blockchain: Blockchain, block: FullBlock, - expected_errors: List[Err], + expected_errors: list[Err], skip_prevalidation: bool = False, fork_info: Optional[ForkInfo] = None, ) -> None: @@ -156,7 +156,7 @@ async def _validate_and_add_block_multi_error( async def _validate_and_add_block_multi_result( blockchain: Blockchain, block: FullBlock, - expected_result: List[AddBlockResult], + expected_result: list[AddBlockResult], skip_prevalidation: bool = False, fork_info: Optional[ForkInfo] = None, ) -> None: @@ -170,7 +170,7 @@ async def _validate_and_add_block_multi_result( except Exception as e: assert isinstance(e, AssertionError) assert "Block was not added" in e.args[0] - expected_list: List[str] = [f"Block was not added: {res}" for res in expected_result] + expected_list: list[str] = [f"Block was not added: {res}" for res in expected_result] if e.args[0] not in expected_list: raise AssertionError(f"{e.args[0].split('Block was not added: ')[1]} not in {expected_result}") diff --git a/chia/_tests/blockchain/test_augmented_chain.py b/chia/_tests/blockchain/test_augmented_chain.py index 9e40e79e651a..16d271de89ed 100644 --- a/chia/_tests/blockchain/test_augmented_chain.py +++ b/chia/_tests/blockchain/test_augmented_chain.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Set, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast import pytest @@ -21,11 +21,11 
@@ class NullBlockchain: _protocol_check: ClassVar[BlocksProtocol] = cast("NullBlockchain", None) - added_blocks: Set[bytes32] = field(default_factory=set) - heights: Dict[uint32, bytes32] = field(default_factory=dict) + added_blocks: set[bytes32] = field(default_factory=set) + heights: dict[uint32, bytes32] = field(default_factory=dict) # BlocksProtocol - async def lookup_block_generators(self, header_hash: bytes32, generator_refs: Set[uint32]) -> Dict[uint32, bytes]: + async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: raise ValueError(Err.GENERATOR_REF_HAS_NO_GENERATOR) # pragma: no cover async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]: @@ -53,7 +53,7 @@ def contains_block(self, header_hash: bytes32) -> bool: def contains_height(self, height: uint32) -> bool: return height in self.heights.keys() - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: raise KeyError("no block records in NullBlockchain") # pragma: no cover @@ -71,7 +71,7 @@ def BR(b: FullBlock) -> BlockRecord: @pytest.mark.anyio @pytest.mark.limit_consensus_modes(reason="save time") -async def test_augmented_chain(default_10000_blocks: List[FullBlock]) -> None: +async def test_augmented_chain(default_10000_blocks: list[FullBlock]) -> None: blocks = default_10000_blocks # this test blockchain is expected to have block generators at these diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 5a734a12e285..ac6516dab2cf 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -3,9 +3,10 @@ import logging import random import time +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import replace -from typing import AsyncIterator, Dict, List, Optional +from typing import Optional import pytest from chia_rs import AugSchemeMPL, G2Element, MerkleSet @@ -80,7 +81,7 @@ async def make_empty_blockchain(constants: ConsensusConstants) -> AsyncIterator[ class TestGenesisBlock: @pytest.mark.anyio async def test_block_tools_proofs_400( - self, default_400_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_400_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: vdf, proof = get_vdf_info_and_proof( blockchain_constants, @@ -93,7 +94,7 @@ async def test_block_tools_proofs_400( @pytest.mark.anyio async def test_block_tools_proofs_1000( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: vdf, proof = get_vdf_info_and_proof( blockchain_constants, @@ -150,7 +151,7 @@ async def test_genesis_validate_1(self, empty_blockchain: Blockchain, bt: BlockT class TestBlockHeaderValidation: @pytest.mark.limit_consensus_modes(reason="save time") @pytest.mark.anyio - async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_blocks: List[FullBlock]) -> None: + async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_blocks: list[FullBlock]) -> None: blocks = default_1000_blocks for block in blocks: if ( @@ -371,7 +372,7 @@ async def test_empty_slots_non_genesis(self, empty_blockchain: Blockchain, bt: B async def test_one_sb_per_slot(self, empty_blockchain: Blockchain, bt: BlockTools) 
-> None: blockchain = empty_blockchain num_blocks = 20 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for _ in range(num_blocks): blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1) await _validate_and_add_block(empty_blockchain, blocks[-1]) @@ -383,7 +384,7 @@ async def test_one_sb_per_slot(self, empty_blockchain: Blockchain, bt: BlockTool async def test_all_overflow(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: blockchain = empty_blockchain num_rounds = 5 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] num_blocks = 0 for i in range(1, num_rounds): num_blocks += i @@ -400,7 +401,7 @@ async def test_unf_block_overflow( ) -> None: blockchain = empty_blockchain - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] while True: # This creates an overflow block, then a normal block, and then an overflow in the next sub-slot # blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, force_overflow=True) @@ -448,7 +449,7 @@ async def test_unf_block_overflow( async def test_one_sb_per_two_slots(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: blockchain = empty_blockchain num_blocks = 20 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for _ in range(num_blocks): # Same thing, but 2 sub-slots per block blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2) await _validate_and_add_block(blockchain, blocks[-1]) @@ -460,7 +461,7 @@ async def test_one_sb_per_two_slots(self, empty_blockchain: Blockchain, bt: Bloc async def test_one_sb_per_five_slots(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: blockchain = empty_blockchain num_blocks = 10 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for _ in range(num_blocks): # Same thing, but 5 sub-slots per block blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=5) await _validate_and_add_block(blockchain, blocks[-1]) @@ -481,7 +482,7 @@ async def test_basic_chain_overflow(self, empty_blockchain: Blockchain, bt: Bloc async def test_one_sb_per_two_slots_force_overflow(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: blockchain = empty_blockchain num_blocks = 10 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for _ in range(num_blocks): blocks = bt.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=2, force_overflow=True) await _validate_and_add_block(blockchain, blocks[-1]) @@ -831,7 +832,7 @@ async def test_empty_slot_no_ses(self, empty_blockchain: Blockchain, bt: BlockTo @pytest.mark.anyio async def test_empty_sub_slots_epoch( - self, empty_blockchain: Blockchain, default_400_blocks: List[FullBlock], bt: BlockTools + self, empty_blockchain: Blockchain, default_400_blocks: list[FullBlock], bt: BlockTools ) -> None: # 2m # Tests adding an empty sub slot after the sub-epoch / epoch. 
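These annotation rewrites lean on the CI changes earlier in the patch: subscripting the builtin containers (`list[str]`, `dict[bytes32, ...]`) only became legal at runtime in Python 3.9 (PEP 585), which is why the 3.8 jobs are removed in the same commit. A minimal, self-contained sketch of the distinction — the names below are invented for illustration and are not taken from the chia code:

```python
from __future__ import annotations  # PEP 563: annotations are stored as strings

from typing import Optional  # kept in `typing`; pyupgrade runs with --keep-runtime-typing


def tail(blocks: list[str]) -> Optional[str]:
    # As an *annotation*, `list[str]` is safe even on 3.8 thanks to the
    # future import above: the annotation is never evaluated at runtime.
    return blocks[-1] if blocks else None


# Outside annotations there is no such escape hatch; this line executes, so on
# 3.8 it raises `TypeError: 'type' object is not subscriptable` and only works
# on 3.9+. Dropping 3.8 makes the builtin-generic spelling safe everywhere.
BlockBatch = list[str]

print(tail(["genesis", "peak"]))  # -> peak
print(BlockBatch(("a", "b")))     # -> ['a', 'b']
```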
@@ -872,7 +873,7 @@ async def test_wrong_cc_hash_rc(self, empty_blockchain: Blockchain, bt: BlockToo @pytest.mark.anyio async def test_invalid_cc_sub_slot_vdf(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: # 2q - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] found_overflow_slot: bool = False while not found_overflow_slot: @@ -970,7 +971,7 @@ async def test_invalid_cc_sub_slot_vdf(self, empty_blockchain: Blockchain, bt: B @pytest.mark.anyio async def test_invalid_rc_sub_slot_vdf(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: # 2p - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] found_block: bool = False while not found_block: @@ -1223,7 +1224,7 @@ async def test_bad_signage_point_index(self, empty_blockchain: Blockchain, bt: B @pytest.mark.anyio async def test_sp_0_no_sp(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: # 7 - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] case_1, case_2 = False, False while not case_1 or not case_2: blocks = bt.get_consecutive_blocks(1, block_list_input=blocks) @@ -1834,7 +1835,7 @@ async def test_pre_validation_fails_bad_blocks(self, empty_blockchain: Blockchai @pytest.mark.anyio async def test_pre_validation( - self, empty_blockchain: Blockchain, default_1000_blocks: List[FullBlock], bt: BlockTools + self, empty_blockchain: Blockchain, default_1000_blocks: list[FullBlock], bt: BlockTools ) -> None: blocks = default_1000_blocks[:100] start = time.time() @@ -1936,7 +1937,7 @@ async def test_conditions( else: assert False - conditions: Dict[ConditionOpcode, List[ConditionWithArgs]] = { + conditions: dict[ConditionOpcode, list[ConditionWithArgs]] = { opcode: [ConditionWithArgs(opcode, args + ([b"garbage"] if with_garbage else []))] } @@ -1953,7 +1954,7 @@ async def test_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [blocks[-1]], @@ -2075,7 +2076,7 @@ async def test_timelock_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [blocks[-1]], @@ -2154,7 +2155,7 @@ async def test_aggsig_garbage( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [blocks[-1]], @@ -2278,7 +2279,7 @@ async def test_ephemeral_timelock( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [blocks[-1]], @@ -2604,7 +2605,7 @@ async def test_cost_exceeds_max( wt: WalletTool = bt.get_pool_wallet_tool() - condition_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} + condition_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} for i in range(7_000): output = 
ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt.pool_ph, int_to_bytes(i)]) condition_dict[ConditionOpcode.CREATE_COIN].append(output) @@ -2637,7 +2638,7 @@ async def test_cost_exceeds_max( ) )[1] assert err in [Err.BLOCK_COST_EXCEEDS_MAX] - results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [blocks[-1]], @@ -2922,7 +2923,7 @@ async def test_duplicate_outputs(self, empty_blockchain: Blockchain, bt: BlockTo wt: WalletTool = bt.get_pool_wallet_tool() - condition_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} + condition_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} for _ in range(2): output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt.pool_ph, int_to_bytes(1)]) condition_dict[ConditionOpcode.CREATE_COIN].append(output) @@ -3309,9 +3310,9 @@ async def test_long_reorg( self, light_blocks: bool, empty_blockchain: Blockchain, - default_10000_blocks: List[FullBlock], - test_long_reorg_blocks: List[FullBlock], - test_long_reorg_blocks_light: List[FullBlock], + default_10000_blocks: list[FullBlock], + test_long_reorg_blocks: list[FullBlock], + test_long_reorg_blocks_light: list[FullBlock], ) -> None: if light_blocks: reorg_blocks = test_long_reorg_blocks_light[:1650] @@ -3330,7 +3331,7 @@ async def test_long_reorg( print(f"pre-validating {len(blocks)} blocks") ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, blocks, @@ -3459,7 +3460,7 @@ async def test_long_reorg( @pytest.mark.anyio async def test_long_compact_blockchain( - self, empty_blockchain: Blockchain, default_2000_blocks_compact: List[FullBlock] + self, empty_blockchain: Blockchain, default_2000_blocks_compact: list[FullBlock] ) -> None: b = empty_blockchain for block in default_2000_blocks_compact: @@ -3583,7 +3584,7 @@ async def test_get_header_blocks_in_range_tx_filter(self, empty_blockchain: Bloc assert blocks_with_filter[header_hash].header_hash == blocks_without_filter[header_hash].header_hash @pytest.mark.anyio - async def test_get_blocks_at(self, empty_blockchain: Blockchain, default_1000_blocks: List[FullBlock]) -> None: + async def test_get_blocks_at(self, empty_blockchain: Blockchain, default_1000_blocks: list[FullBlock]) -> None: b = empty_blockchain heights = [] for block in default_1000_blocks[:200]: @@ -3889,7 +3890,7 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> block1, block2 = b1, b2 counter += 1 - preval: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + preval: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( b.constants, b, [block1], @@ -3928,7 +3929,7 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> await _validate_and_add_block(b, block) -async def test_get_tx_peak(default_400_blocks: List[FullBlock], empty_blockchain: Blockchain) -> None: +async def test_get_tx_peak(default_400_blocks: list[FullBlock], empty_blockchain: Blockchain) -> None: bc = empty_blockchain test_blocks = default_400_blocks[:100] ssi = bc.constants.SUB_SLOT_ITERS_STARTING @@ -3972,8 +3973,8 @@ def to_bytes(gen: Optional[SerializedProgram]) -> bytes: 
@pytest.mark.limit_consensus_modes(reason="block heights for generators differ between test chains in different modes") @pytest.mark.parametrize("clear_cache", [True, False]) async def test_lookup_block_generators( - default_10000_blocks: List[FullBlock], - test_long_reorg_blocks_light: List[FullBlock], + default_10000_blocks: list[FullBlock], + test_long_reorg_blocks_light: list[FullBlock], bt: BlockTools, empty_blockchain: Blockchain, clear_cache: bool, diff --git a/chia/_tests/blockchain/test_blockchain_transactions.py b/chia/_tests/blockchain/test_blockchain_transactions.py index 9f4f31f18408..aca7afb2b2ec 100644 --- a/chia/_tests/blockchain/test_blockchain_transactions.py +++ b/chia/_tests/blockchain/test_blockchain_transactions.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from typing import Tuple import pytest from clvm.casts import int_to_bytes @@ -32,7 +31,7 @@ class TestBlockchainTransactions: @pytest.mark.anyio async def test_basic_blockchain_tx( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -98,7 +97,7 @@ async def test_basic_blockchain_tx( @pytest.mark.anyio async def test_validate_blockchain_with_double_spend( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 5 wallet_a = WALLET_A @@ -138,7 +137,7 @@ async def test_validate_blockchain_with_double_spend( @pytest.mark.anyio async def test_validate_blockchain_duplicate_output( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 3 wallet_a = WALLET_A @@ -178,7 +177,7 @@ async def test_validate_blockchain_duplicate_output( @pytest.mark.anyio async def test_validate_blockchain_with_reorg_double_spend( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -287,7 +286,7 @@ async def test_validate_blockchain_with_reorg_double_spend( @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_coin( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], softfork_height: uint32 + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], softfork_height: uint32 ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -375,7 +374,7 @@ async def test_validate_blockchain_spend_reorg_coin( @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_cb_coin( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 15 wallet_a = WALLET_A @@ -420,7 +419,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_since_genesis( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -471,7 +470,7 @@ async def test_validate_blockchain_spend_reorg_since_genesis( 
@pytest.mark.anyio async def test_assert_my_coin_id( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -543,7 +542,7 @@ async def test_assert_my_coin_id( @pytest.mark.anyio async def test_assert_coin_announcement_consumed( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -627,7 +626,7 @@ async def test_assert_coin_announcement_consumed( @pytest.mark.anyio async def test_assert_puzzle_announcement_consumed( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -711,7 +710,7 @@ async def test_assert_puzzle_announcement_consumed( @pytest.mark.anyio async def test_assert_height_absolute( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -777,7 +776,7 @@ async def test_assert_height_absolute( @pytest.mark.anyio async def test_assert_height_relative( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 11 wallet_a = WALLET_A @@ -845,7 +844,7 @@ async def test_assert_height_relative( @pytest.mark.anyio async def test_assert_seconds_relative( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -917,7 +916,7 @@ async def test_assert_seconds_relative( @pytest.mark.anyio async def test_assert_seconds_absolute( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A @@ -991,7 +990,7 @@ async def test_assert_seconds_absolute( @pytest.mark.anyio async def test_assert_fee_condition( - self, two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: num_blocks = 10 wallet_a = WALLET_A diff --git a/chia/_tests/blockchain/test_build_chains.py b/chia/_tests/blockchain/test_build_chains.py index 4904d32e0897..c41458aa1d0b 100644 --- a/chia/_tests/blockchain/test_build_chains.py +++ b/chia/_tests/blockchain/test_build_chains.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chia.types.full_block import FullBlock @@ -21,41 +19,41 @@ @pytest.mark.build_test_chains -def test_trigger_default_400(default_400_blocks: List[FullBlock]) -> None: +def test_trigger_default_400(default_400_blocks: list[FullBlock]) -> None: pass @pytest.mark.build_test_chains -def test_trigger_default_1000(default_1000_blocks: List[FullBlock]) -> None: +def test_trigger_default_1000(default_1000_blocks: list[FullBlock]) -> None: pass @pytest.mark.build_test_chains -def 
test_trigger_pre_genesis_empty_1000(pre_genesis_empty_slots_1000_blocks: List[FullBlock]) -> None: +def test_trigger_pre_genesis_empty_1000(pre_genesis_empty_slots_1000_blocks: list[FullBlock]) -> None: pass @pytest.mark.build_test_chains -def test_trigger_default_1500(default_1500_blocks: List[FullBlock]) -> None: +def test_trigger_default_1500(default_1500_blocks: list[FullBlock]) -> None: pass @pytest.mark.build_test_chains def test_trigger_default_10000( - default_10000_blocks: List[FullBlock], - test_long_reorg_blocks: List[FullBlock], - test_long_reorg_blocks_light: List[FullBlock], - test_long_reorg_1500_blocks: List[FullBlock], - test_long_reorg_1500_blocks_light: List[FullBlock], + default_10000_blocks: list[FullBlock], + test_long_reorg_blocks: list[FullBlock], + test_long_reorg_blocks_light: list[FullBlock], + test_long_reorg_1500_blocks: list[FullBlock], + test_long_reorg_1500_blocks_light: list[FullBlock], ) -> None: pass @pytest.mark.build_test_chains -def test_trigger_default_2000_compact(default_2000_blocks_compact: List[FullBlock]) -> None: +def test_trigger_default_2000_compact(default_2000_blocks_compact: list[FullBlock]) -> None: pass @pytest.mark.build_test_chains -def test_trigger_default_10000_compact(default_10000_blocks_compact: List[FullBlock]) -> None: +def test_trigger_default_10000_compact(default_10000_blocks_compact: list[FullBlock]) -> None: pass diff --git a/chia/_tests/blockchain/test_get_block_generator.py b/chia/_tests/blockchain/test_get_block_generator.py index 0bcf025fed8b..d1d57be6c3c5 100644 --- a/chia/_tests/blockchain/test_get_block_generator.py +++ b/chia/_tests/blockchain/test_get_block_generator.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict, List, Optional, Set +from typing import Optional import pytest from clvm.casts import int_to_bytes @@ -17,7 +17,7 @@ class BR: prev_header_hash: bytes32 transactions_generator: Optional[SerializedProgram] - transactions_generator_ref_list: List[uint32] + transactions_generator_ref_list: list[uint32] @dataclass(frozen=True) @@ -35,15 +35,15 @@ def program(i: int) -> SerializedProgram: return SerializedProgram.from_bytes(int_to_bytes(i)) -async def zero_hits(hh: bytes32, refs: Set[uint32]) -> Dict[uint32, bytes]: +async def zero_hits(hh: bytes32, refs: set[uint32]) -> dict[uint32, bytes]: return {} -async def never_called(hh: bytes32, refs: Set[uint32]) -> Dict[uint32, bytes]: +async def never_called(hh: bytes32, refs: set[uint32]) -> dict[uint32, bytes]: assert False # pragma: no cover -async def only_lookup_5(hh: bytes32, refs: Set[uint32]) -> Dict[uint32, bytes]: +async def only_lookup_5(hh: bytes32, refs: set[uint32]) -> dict[uint32, bytes]: assert refs == {uint32(5)} return {uint32(5): bytes(program(5))} diff --git a/chia/_tests/blockchain/test_lookup_fork_chain.py b/chia/_tests/blockchain/test_lookup_fork_chain.py index cc1476bfe947..42f5ffc63934 100644 --- a/chia/_tests/blockchain/test_lookup_fork_chain.py +++ b/chia/_tests/blockchain/test_lookup_fork_chain.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict, List import pytest @@ -15,7 +14,7 @@ class DummyChain: - _chain: Dict[bytes32, bytes32] + _chain: dict[bytes32, bytes32] def __init__(self) -> None: self._chain = {} @@ -23,8 +22,8 @@ def __init__(self) -> None: def add_block(self, h: bytes32, prev: bytes32) -> None: self._chain[h] = prev - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: - 
ret: List[bytes32] = [] + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: + ret: list[bytes32] = [] for h in header_hashes: ret.append(self._chain[h]) return ret diff --git a/chia/_tests/build-init-files.py b/chia/_tests/build-init-files.py index 19bc08bc3103..67b5840eec04 100755 --- a/chia/_tests/build-init-files.py +++ b/chia/_tests/build-init-files.py @@ -13,7 +13,6 @@ import logging import pathlib -from typing import List import click @@ -27,8 +26,8 @@ ignores = {"__pycache__", ".pytest_cache"} -def traverse_directory(path: pathlib.Path) -> List[pathlib.Path]: - of_interest: List[pathlib.Path] = [] +def traverse_directory(path: pathlib.Path) -> list[pathlib.Path]: + of_interest: list[pathlib.Path] = [] file_found = False diff --git a/chia/_tests/build-job-matrix.py b/chia/_tests/build-job-matrix.py index 9bc6080875dd..70d2ae374b04 100644 --- a/chia/_tests/build-job-matrix.py +++ b/chia/_tests/build-job-matrix.py @@ -6,7 +6,7 @@ import logging import types from pathlib import Path -from typing import Any, Dict, List +from typing import Any import testconfig @@ -18,7 +18,7 @@ def skip(path: Path) -> bool: return any(part.startswith(("_", ".")) and part != "_tests" for part in path.parts) -def subdirs(per: str) -> List[Path]: +def subdirs(per: str) -> list[Path]: if per == "directory": glob_pattern = "**/" elif per == "file": @@ -44,11 +44,11 @@ def subdirs(per: str) -> List[Path]: return sorted(paths) -def module_dict(module: types.ModuleType) -> Dict[str, Any]: +def module_dict(module: types.ModuleType) -> dict[str, Any]: return {k: v for k, v in module.__dict__.items() if not k.startswith("_") and k != "annotations"} -def dir_config(dir: Path) -> Dict[str, Any]: +def dir_config(dir: Path) -> dict[str, Any]: import importlib module_name = ".".join([*dir.relative_to(root_path).parts, "config"]) @@ -60,14 +60,14 @@ def dir_config(dir: Path) -> Dict[str, Any]: @dataclasses.dataclass class SpecifiedDefaultsError(Exception): - overlap: Dict[str, Any] + overlap: dict[str, Any] def __post_init__(self) -> None: super().__init__() # Overwrite with directory specific values -def update_config(parent: Dict[str, Any], child: Dict[str, Any]) -> Dict[str, Any]: +def update_config(parent: dict[str, Any], child: dict[str, Any]) -> dict[str, Any]: if child is None: return parent conf = child @@ -107,8 +107,8 @@ def update_config(parent: Dict[str, Any], child: Dict[str, Any]) -> Dict[str, An configuration = [] -specified_defaults: Dict[Path, Dict[str, Any]] = {} -pytest_monitor_enabling_paths: List[Path] = [] +specified_defaults: dict[Path, dict[str, Any]] = {} +pytest_monitor_enabling_paths: list[Path] = [] for path, index in test_paths_with_index: if path.is_dir(): @@ -177,7 +177,7 @@ def mung_path(path: Path) -> str: for_matrix = dict(sorted(for_matrix.items())) configuration.append(for_matrix) -messages: List[str] = [] +messages: list[str] = [] if len(specified_defaults) > 0: message = f"Found {len(specified_defaults)} directories with specified defaults" diff --git a/chia/_tests/check_sql_statements.py b/chia/_tests/check_sql_statements.py index df9010318daa..45b4f5f4f37e 100755 --- a/chia/_tests/check_sql_statements.py +++ b/chia/_tests/check_sql_statements.py @@ -3,12 +3,11 @@ import sys from subprocess import check_output -from typing import Dict, Set, Tuple # check for duplicate index names -def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = set()) -> int: +def check_create(sql_type: str, cwd: str, exemptions: set[tuple[str, str]] = 
set()) -> int: # the need for this change seems to come from the git precommit plus the python pre-commit environment # having GIT_DIR specified but not GIT_WORK_TREE. this is an issue in some less common git setups # such as with worktrees, at least in particular uses of them. i think that we could switch to letting @@ -19,7 +18,7 @@ def check_create(sql_type: str, cwd: str, exemptions: Set[Tuple[str, str]] = set ret = 0 - items: Dict[str, str] = {} + items: dict[str, str] = {} for line in lines: if f"CREATE {sql_type}" not in line: continue diff --git a/chia/_tests/clvm/coin_store.py b/chia/_tests/clvm/coin_store.py index 397090ee3a21..8aec7fcea419 100644 --- a/chia/_tests/clvm/coin_store.py +++ b/chia/_tests/clvm/coin_store.py @@ -1,8 +1,9 @@ from __future__ import annotations from collections import defaultdict +from collections.abc import Iterator from dataclasses import dataclass, replace -from typing import Dict, Iterator, Optional +from typing import Optional from chia.consensus.constants import ConsensusConstants from chia.consensus.cost_calculator import NPCResult @@ -30,8 +31,8 @@ class CoinTimestamp: class CoinStore: def __init__(self, constants: ConsensusConstants, reward_mask: int = 0): - self._db: Dict[bytes32, CoinRecord] = dict() - self._ph_index: Dict = defaultdict(list) + self._db: dict[bytes32, CoinRecord] = dict() + self._ph_index: dict = defaultdict(list) self._reward_mask = reward_mask self._constants = constants diff --git a/chia/_tests/clvm/test_curry_and_treehash.py b/chia/_tests/clvm/test_curry_and_treehash.py index 16a081484c15..fc2595ba9fb4 100644 --- a/chia/_tests/clvm/test_curry_and_treehash.py +++ b/chia/_tests/clvm/test_curry_and_treehash.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chia.types.blockchain_format.program import Program @@ -37,7 +35,7 @@ def test_curry_and_treehash() -> None: @pytest.mark.parametrize( "value", [[], [bytes32([3] * 32)], [bytes32([0] * 32), bytes32([1] * 32)], [bytes([1]), bytes([1, 2, 3])]] ) -def test_shatree_atom_list(value: List[bytes]) -> None: +def test_shatree_atom_list(value: list[bytes]) -> None: h1 = shatree_atom_list(value) h2 = Program.to(value).get_tree_hash() assert h1 == h2 diff --git a/chia/_tests/clvm/test_puzzle_drivers.py b/chia/_tests/clvm/test_puzzle_drivers.py index dbd4a805a449..622124de756e 100644 --- a/chia/_tests/clvm/test_puzzle_drivers.py +++ b/chia/_tests/clvm/test_puzzle_drivers.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Union +from typing import Any, Union import pytest @@ -9,7 +9,7 @@ def test_puzzle_info() -> None: - test_driver: Dict[str, Any] = { + test_driver: dict[str, Any] = { "string": "hello", "bytes": "0xcafef00d", "int": "123", @@ -17,7 +17,7 @@ def test_puzzle_info() -> None: "zero": "0", "nil": "()", } - test_also: Dict[str, Any] = {"type": "TEST", "string": "hello"} + test_also: dict[str, Any] = {"type": "TEST", "string": "hello"} test_driver["also"] = test_also with pytest.raises(ValueError, match="A type is required"): diff --git a/chia/_tests/clvm/test_puzzles.py b/chia/_tests/clvm/test_puzzles.py index 46f2c12e46bb..1d23a68f8db9 100644 --- a/chia/_tests/clvm/test_puzzles.py +++ b/chia/_tests/clvm/test_puzzles.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Iterable, List, Tuple +from collections.abc import Iterable from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -51,7 +51,7 @@ def throwaway_puzzle_hash(index: int, key_lookup: KeyTool) -> bytes32: def 
do_test_spend( puzzle_reveal: Program, solution: Program, - payments: Iterable[Tuple[bytes32, int]], + payments: Iterable[tuple[bytes32, int]], key_lookup: KeyTool, farm_time: CoinTimestamp = T1, spend_time: CoinTimestamp = T2, @@ -86,7 +86,7 @@ def do_test_spend( assert 0 # make sure we can actually sign the solution - signatures: List[G2Element] = [] + signatures: list[G2Element] = [] for coin_spend in spend_bundle.coin_spends: signature = key_lookup.signature_for_solution(coin_spend, bytes([2] * 32)) signatures.append(signature) @@ -95,7 +95,7 @@ def do_test_spend( def default_payments_and_conditions( initial_index: int, key_lookup: KeyTool -) -> Tuple[List[Tuple[bytes32, int]], Program]: +) -> tuple[list[tuple[bytes32, int]], Program]: # the coin we get from coin_db.farm_coin only has amount 1024, so we can # only make small payments to avoid failing with MINTING_COIN payments = [ diff --git a/chia/_tests/clvm/test_singletons.py b/chia/_tests/clvm/test_singletons.py index 08e823b3b570..8dc2f91d6e37 100644 --- a/chia/_tests/clvm/test_singletons.py +++ b/chia/_tests/clvm/test_singletons.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -54,7 +54,7 @@ async def make_and_spend_bundle( sim_client: SimClient, coin: Coin, delegated_puzzle: Program, - coinsols: List[CoinSpend], + coinsols: list[CoinSpend], ex_error: Optional[Err] = None, fail_msg: str = "", cost_logger: Optional[CostLogger] = None, @@ -105,7 +105,7 @@ async def test_singleton_top_layer(version, cost_logger): await sim.farm_block(starting_puzzle.get_tree_hash()) starting_coin: Coin = await sim_client.get_coin_records_by_puzzle_hash(starting_puzzle.get_tree_hash()) starting_coin = starting_coin[0].coin - comment: List[Tuple[str, str]] = [("hello", "world")] + comment: list[tuple[str, str]] = [("hello", "world")] # LAUNCHING # Try to create an even singleton (driver test) diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 5a3032589000..4a4acc7983eb 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -1,10 +1,11 @@ from __future__ import annotations import sys +from collections.abc import AsyncIterator, Iterable from contextlib import asynccontextmanager from dataclasses import dataclass, field from pathlib import Path -from typing import Any, AsyncIterator, Dict, Iterable, List, Optional, Tuple, Type, Union, cast +from typing import Any, Optional, Union, cast from chia_rs import Coin, G2Element @@ -40,25 +41,25 @@ # Any functions that are the same for every command being tested should be below. # Functions that are specific to a command should be in the test file for that command. 
-logType = Dict[str, Optional[List[Tuple[Any, ...]]]] +logType = dict[str, Optional[list[tuple[Any, ...]]]] @dataclass class TestRpcClient: - client_type: Type[RpcClient] + client_type: type[RpcClient] rpc_port: Optional[uint16] = None root_path: Optional[Path] = None - config: Optional[Dict[str, Any]] = None + config: Optional[dict[str, Any]] = None create_called: bool = field(init=False, default=False) - rpc_log: Dict[str, List[Tuple[Any, ...]]] = field(init=False, default_factory=dict) + rpc_log: dict[str, list[tuple[Any, ...]]] = field(init=False, default_factory=dict) - async def create(self, _: str, rpc_port: uint16, root_path: Path, config: Dict[str, Any]) -> None: + async def create(self, _: str, rpc_port: uint16, root_path: Path, config: dict[str, Any]) -> None: self.rpc_port = rpc_port self.root_path = root_path self.config = config self.create_called = True - def add_to_log(self, method_name: str, args: Tuple[Any, ...]) -> None: + def add_to_log(self, method_name: str, args: tuple[Any, ...]) -> None: if method_name not in self.rpc_log: self.rpc_log[method_name] = [] self.rpc_log[method_name].append(args) @@ -73,12 +74,12 @@ def check_log(self, expected_calls: logType) -> None: @dataclass class TestFarmerRpcClient(TestRpcClient): - client_type: Type[FarmerRpcClient] = field(init=False, default=FarmerRpcClient) + client_type: type[FarmerRpcClient] = field(init=False, default=FarmerRpcClient) @dataclass class TestWalletRpcClient(TestRpcClient): - client_type: Type[WalletRpcClient] = field(init=False, default=WalletRpcClient) + client_type: type[WalletRpcClient] = field(init=False, default=WalletRpcClient) fingerprint: int = field(init=False, default=0) wallet_index: int = field(init=False, default=0) @@ -86,7 +87,7 @@ async def get_sync_status(self) -> GetSyncStatusResponse: self.add_to_log("get_sync_status", ()) return GetSyncStatusResponse(synced=True, syncing=False) - async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, int]]]: + async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> list[dict[str, Union[str, int]]]: self.add_to_log("get_wallets", (wallet_type,)) # we cant start with zero because ints cant have a leading zero if wallet_type is not None: @@ -131,21 +132,21 @@ async def get_cat_name(self, wallet_id: int) -> str: self.add_to_log("get_cat_name", (wallet_id,)) return "test" + str(wallet_id) - async def sign_message_by_address(self, address: str, message: str) -> Tuple[str, str, str]: + async def sign_message_by_address(self, address: str, message: str) -> tuple[str, str, str]: self.add_to_log("sign_message_by_address", (address, message)) pubkey = bytes([3] * 48).hex() signature = bytes([6] * 576).hex() signing_mode = SigningMode.CHIP_0002.value return pubkey, signature, signing_mode - async def sign_message_by_id(self, id: str, message: str) -> Tuple[str, str, str]: + async def sign_message_by_id(self, id: str, message: str) -> tuple[str, str, str]: self.add_to_log("sign_message_by_id", (id, message)) pubkey = bytes([4] * 48).hex() signature = bytes([7] * 576).hex() signing_mode = SigningMode.CHIP_0002.value return pubkey, signature, signing_mode - async def cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[Tuple[Optional[uint32], str]]: + async def cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[tuple[Optional[uint32], str]]: """ if bytes32([1] * 32), return (uint32(2), "test1"), if bytes32([1] * 32), return (uint32(3), "test2") """ @@ -157,7 +158,7 @@ async def 
cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[Tuple[Option else: return None - async def get_nft_info(self, coin_id: str, latest: bool = True) -> Dict[str, Any]: + async def get_nft_info(self, coin_id: str, latest: bool = True) -> dict[str, Any]: self.add_to_log("get_nft_info", (coin_id, latest)) coin_id_bytes = bytes32.fromhex(coin_id) nft_info = NFTInfo( @@ -186,9 +187,9 @@ async def get_nft_info(self, coin_id: str, latest: bool = True) -> Dict[str, Any async def nft_calculate_royalties( self, - royalty_assets_dict: Dict[Any, Tuple[Any, uint16]], - fungible_asset_dict: Dict[Any, uint64], - ) -> Dict[Any, List[Dict[str, Any]]]: + royalty_assets_dict: dict[Any, tuple[Any, uint16]], + fungible_asset_dict: dict[Any, uint64], + ) -> dict[Any, list[dict[str, Any]]]: self.add_to_log("nft_calculate_royalties", (royalty_assets_dict, fungible_asset_dict)) return NFTWallet.royalty_calculation( royalty_assets_dict=royalty_assets_dict, @@ -199,7 +200,7 @@ async def get_spendable_coins( self, wallet_id: int, coin_selection_config: CoinSelectionConfig, - ) -> Tuple[List[CoinRecord], List[CoinRecord], List[Coin]]: + ) -> tuple[list[CoinRecord], list[CoinRecord], list[Coin]]: """ We return a tuple containing: (confirmed records, unconfirmed removals, unconfirmed additions) """ @@ -246,9 +247,9 @@ async def get_next_address(self, wallet_id: int, new_address: bool) -> str: async def send_transaction_multi( self, wallet_id: int, - additions: List[Dict[str, object]], + additions: list[dict[str, object]], tx_config: TXConfig, - coins: Optional[List[Coin]] = None, + coins: Optional[list[Coin]] = None, fee: uint64 = uint64(0), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), @@ -283,17 +284,17 @@ async def send_transaction_multi( @dataclass class TestFullNodeRpcClient(TestRpcClient): - client_type: Type[FullNodeRpcClient] = field(init=False, default=FullNodeRpcClient) + client_type: type[FullNodeRpcClient] = field(init=False, default=FullNodeRpcClient) async def get_fee_estimate( self, - target_times: Optional[List[int]], + target_times: Optional[list[int]], cost: Optional[int], - ) -> Dict[str, Any]: + ) -> dict[str, Any]: return {} - async def get_blockchain_state(self) -> Dict[str, Any]: - response: Dict[str, Any] = { + async def get_blockchain_state(self) -> dict[str, Any]: + response: dict[str, Any] = { "peak": cast(BlockRecord, create_test_block_record()), "genesis_challenge_initialized": True, "sync": { @@ -331,12 +332,12 @@ async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]: @dataclass class TestDataLayerRpcClient(TestRpcClient): - client_type: Type[DataLayerRpcClient] = field(init=False, default=DataLayerRpcClient) + client_type: type[DataLayerRpcClient] = field(init=False, default=DataLayerRpcClient) @dataclass class TestSimulatorFullNodeRpcClient(TestRpcClient): - client_type: Type[SimulatorFullNodeRpcClient] = field(init=False, default=SimulatorFullNodeRpcClient) + client_type: type[SimulatorFullNodeRpcClient] = field(init=False, default=SimulatorFullNodeRpcClient) @dataclass @@ -354,7 +355,7 @@ class TestRpcClients: default_factory=TestSimulatorFullNodeRpcClient ) - def get_client(self, client_type: Type[_T_RpcClient]) -> _T_RpcClient: + def get_client(self, client_type: type[_T_RpcClient]) -> _T_RpcClient: if client_type == FarmerRpcClient: return cast(FarmerRpcClient, self.farmer_rpc_client) # type: ignore[return-value] elif client_type == WalletRpcClient: @@ -379,12 +380,12 @@ def 
create_service_and_wallet_client_generators(test_rpc_clients: TestRpcClients
 
     @asynccontextmanager
     async def test_get_any_service_client(
-        client_type: Type[_T_RpcClient],
+        client_type: type[_T_RpcClient],
         rpc_port: Optional[int] = None,
         root_path: Optional[Path] = None,
         consume_errors: bool = True,
         use_ssl: bool = True,
-    ) -> AsyncIterator[Tuple[_T_RpcClient, Dict[str, Any]]]:
+    ) -> AsyncIterator[tuple[_T_RpcClient, dict[str, Any]]]:
         if root_path is None:
             root_path = default_root
 
@@ -411,7 +412,7 @@ async def test_get_wallet_client(
         wallet_rpc_port: Optional[int] = None,
         fingerprint: Optional[int] = None,
         root_path: Path = default_root,
-    ) -> AsyncIterator[Tuple[WalletRpcClient, int, Dict[str, Any]]]:
+    ) -> AsyncIterator[tuple[WalletRpcClient, int, dict[str, Any]]]:
         async with test_get_any_service_client(WalletRpcClient, wallet_rpc_port, root_path) as (wallet_client, config):
             wallet_client.fingerprint = fingerprint  # type: ignore
             assert fingerprint is not None
@@ -430,7 +431,7 @@ def cli_confirm(input_message: str, abort_message: str = "Did not confirm. Abort
     chia.cmds.wallet_funcs.cli_confirm = cli_confirm  # type: ignore[attr-defined]
 
 
-def run_cli_command(capsys: object, chia_root: Path, command_list: List[str]) -> str:
+def run_cli_command(capsys: object, chia_root: Path, command_list: list[str]) -> str:
     """
     This is just an easy way to run the chia CLI with the given command list.
     """
@@ -459,7 +460,7 @@ def cli_assert_shortcut(output: str, strings_to_assert: Iterable[str]) -> None:
 
 def run_cli_command_and_assert(
-    capsys: object, chia_root: Path, command_list: List[str], strings_to_assert: Iterable[str]
+    capsys: object, chia_root: Path, command_list: list[str], strings_to_assert: Iterable[str]
 ) -> None:
     """
     Runs the command and asserts that all the strings in strings_to_assert are in the output
diff --git a/chia/_tests/cmds/conftest.py b/chia/_tests/cmds/conftest.py
index 881abcd85be8..51f39f42b02a 100644
--- a/chia/_tests/cmds/conftest.py
+++ b/chia/_tests/cmds/conftest.py
@@ -1,8 +1,8 @@
 from __future__ import annotations
 
 import tempfile
+from collections.abc import Iterator
 from pathlib import Path
-from typing import Iterator, Tuple
 
 import pytest
 
@@ -11,7 +11,7 @@
 
 @pytest.fixture(scope="module")  # every file has its own config generated, just to be safe
-def get_test_cli_clients() -> Iterator[Tuple[TestRpcClients, Path]]:
+def get_test_cli_clients() -> Iterator[tuple[TestRpcClients, Path]]:
     # we can't use the normal config fixture because it only supports function scope.
     with tempfile.TemporaryDirectory() as tmp_path:
         root_path: Path = Path(tmp_path) / "chia_root"
diff --git a/chia/_tests/cmds/test_cmd_framework.py b/chia/_tests/cmds/test_cmd_framework.py
index 1ed1026844b0..f7b438269721 100644
--- a/chia/_tests/cmds/test_cmd_framework.py
+++ b/chia/_tests/cmds/test_cmd_framework.py
@@ -1,8 +1,9 @@
 from __future__ import annotations
 
 import textwrap
+from collections.abc import Sequence
 from dataclasses import asdict
-from typing import Any, Dict, List, Optional, Sequence
+from typing import Any, Optional
 
 import click
 import pytest
@@ -22,7 +23,7 @@ def _cmd() -> None:
 
     mock_type = type(cmd.__class__.__name__, (cmd.__class__,), {})
 
-    def dict_compare_with_ignore_context(one: Dict[str, Any], two: Dict[str, Any]) -> None:
+    def dict_compare_with_ignore_context(one: dict[str, Any], two: dict[str, Any]) -> None:
         for k, v in one.items():
             if k == "context":
                 continue
@@ -286,7 +287,7 @@ def run(self) -> None: ...
@chia_command(cmd, "temp_cmd_bad_type", "blah") class TempCMDBadType: - sequence: List[int] = option("--sequence") + sequence: list[int] = option("--sequence") def run(self) -> None: ... diff --git a/chia/_tests/cmds/test_daemon.py b/chia/_tests/cmds/test_daemon.py index 8c92a004dc6b..c514889d763f 100644 --- a/chia/_tests/cmds/test_daemon.py +++ b/chia/_tests/cmds/test_daemon.py @@ -2,7 +2,7 @@ import sys from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional import pytest from _pytest.capture import CaptureFixture @@ -28,7 +28,7 @@ async def is_keyring_locked() -> bool: async def unlock_keyring(_passphrase: str) -> bool: return True - async def connect_to_daemon_and_validate(_root_path: Path, _config: Dict[str, Any]) -> DummyConnection: + async def connect_to_daemon_and_validate(_root_path: Path, _config: dict[str, Any]) -> DummyConnection: return DummyConnection() class DummyKeychain: @@ -74,7 +74,7 @@ async def close() -> None: return None async def create_start_daemon_connection_dummy( - root_path: Path, config: Dict[str, Any], *, skip_keyring: bool + root_path: Path, config: dict[str, Any], *, skip_keyring: bool ) -> DummyDaemon: return DummyDaemon() diff --git a/chia/_tests/cmds/test_farm_cmd.py b/chia/_tests/cmds/test_farm_cmd.py index ae045841adb0..286547d50caa 100644 --- a/chia/_tests/cmds/test_farm_cmd.py +++ b/chia/_tests/cmds/test_farm_cmd.py @@ -1,7 +1,6 @@ from __future__ import annotations import re -from typing import Tuple import pytest from _pytest.capture import CaptureFixture @@ -18,7 +17,7 @@ @pytest.mark.anyio async def test_farm_summary_command( capsys: CaptureFixture[str], - farmer_one_harvester_simulator_wallet: Tuple[ + farmer_one_harvester_simulator_wallet: tuple[ HarvesterService, FarmerService, SimulatorFullNodeService, diff --git a/chia/_tests/cmds/test_show.py b/chia/_tests/cmds/test_show.py index f5083bfb1731..f2fe27545f20 100644 --- a/chia/_tests/cmds/test_show.py +++ b/chia/_tests/cmds/test_show.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia._tests.cmds.cmd_test_utils import TestFullNodeRpcClient, TestRpcClients, run_cli_command_and_assert from chia._tests.cmds.testing_classes import hash_to_height, height_hash @@ -16,9 +16,9 @@ @dataclass class ShowFullNodeRpcClient(TestFullNodeRpcClient): - async def get_fee_estimate(self, target_times: Optional[List[int]], cost: Optional[int]) -> Dict[str, Any]: + async def get_fee_estimate(self, target_times: Optional[list[int]], cost: Optional[int]) -> dict[str, Any]: self.add_to_log("get_fee_estimate", (target_times, cost)) - response: Dict[str, Any] = { + response: dict[str, Any] = { "current_fee_rate": 0, "estimates": [0, 0, 0], "fee_rate_last_block": 30769.681426718744, @@ -82,7 +82,7 @@ async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: RPC_CLIENT_TO_USE = ShowFullNodeRpcClient() # pylint: disable=no-value-for-parameter -def test_chia_show(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_chia_show(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client test_rpc_clients.full_node_rpc_client = RPC_CLIENT_TO_USE @@ -106,7 +106,7 @@ def test_chia_show(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, P "Is a Transaction Block?True", ] run_cli_command_and_assert(capsys, root_dir, command_args, 
assert_list) - expected_calls: dict[str, Optional[List[tuple[Any, ...]]]] = { # name of rpc: (args) + expected_calls: dict[str, Optional[list[tuple[Any, ...]]]] = { # name of rpc: (args) "get_blockchain_state": None, "get_block_record": [(height_hash(height),) for height in [11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 11, 10]], "get_block_record_by_height": [(10,)], diff --git a/chia/_tests/cmds/test_tx_config_args.py b/chia/_tests/cmds/test_tx_config_args.py index 76837d2664a5..e7b2312f6beb 100644 --- a/chia/_tests/cmds/test_tx_config_args.py +++ b/chia/_tests/cmds/test_tx_config_args.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Sequence from pathlib import Path -from typing import Optional, Sequence +from typing import Optional import click from click.testing import CliRunner diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 4f2d377e08f3..3d49272ec157 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -2,7 +2,7 @@ import dataclasses from pathlib import Path -from typing import List, Optional, Tuple +from typing import Optional from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert from chia._tests.cmds.wallet.test_consts import FINGERPRINT, FINGERPRINT_ARG, STD_TX, STD_UTX, get_bytes32 @@ -20,7 +20,7 @@ # Coin Commands -def test_coins_get_info(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_coins_get_info(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -54,7 +54,7 @@ def test_coins_get_info(capsys: object, get_test_cli_clients: Tuple[TestRpcClien test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_coins_combine(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_coins_combine(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -142,7 +142,7 @@ async def combine_coins( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_coins_split(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_coins_split(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients test_coin = Coin(Program.to(0).get_tree_hash(), Program.to(1).get_tree_hash(), uint64(10_000_000_000_000)) @@ -156,11 +156,11 @@ async def split_coins( async def get_coin_records_by_names( self, - names: List[bytes32], + names: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: cr = CoinRecord( test_coin, uint32(10), diff --git a/chia/_tests/cmds/wallet/test_dao.py b/chia/_tests/cmds/wallet/test_dao.py index 49840f39601e..aa78b24a1c77 100644 --- a/chia/_tests/cmds/wallet/test_dao.py +++ b/chia/_tests/cmds/wallet/test_dao.py @@ -3,7 +3,7 @@ import time from pathlib import Path from secrets import token_bytes -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union import pytest from typing_extensions import override @@ -32,7 +32,7 @@ # DAO Commands -def test_dao_create(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_dao_create(capsys: object, get_test_cli_clients: 
tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -41,7 +41,7 @@ async def create_new_dao_wallet( self, mode: str, tx_config: TXConfig, - dao_rules: Optional[Dict[str, uint64]] = None, + dao_rules: Optional[dict[str, uint64]] = None, amount_of_cats: Optional[uint64] = None, treasury_id: Optional[bytes32] = None, filter_amount: uint64 = uint64(1), @@ -118,17 +118,17 @@ async def create_new_dao_wallet( run_cli_command_and_assert(capsys, root_dir, add_command_args, assert_list) -def test_dao_treasury(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_dao_treasury(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients class DAOCreateRpcClient(TestWalletRpcClient): async def dao_get_treasury_id( self, wallet_id: int, - ) -> Dict[str, str]: + ) -> dict[str, str]: return {"treasury_id": "0xCAFEF00D"} - async def dao_get_treasury_balance(self, wallet_id: int) -> Dict[str, Union[str, bool, Dict[str, int]]]: + async def dao_get_treasury_balance(self, wallet_id: int) -> dict[str, Union[str, bool, dict[str, int]]]: if wallet_id == 2: return {"success": True, "balances": {"xch": 1000000000000, "0xCAFEF00D": 10000000}} else: @@ -150,7 +150,7 @@ async def dao_add_funds_to_treasury( async def dao_get_rules( self, wallet_id: int, - ) -> Dict[str, Dict[str, int]]: + ) -> dict[str, dict[str, int]]: return {"rules": {"proposal_minimum": 100}} @override @@ -201,7 +201,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: run_cli_command_and_assert(capsys, root_dir, rules_args, rules_asserts) -def test_dao_proposals(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_dao_proposals(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -210,7 +210,7 @@ async def dao_get_proposals( self, wallet_id: int, include_closed: bool = True, - ) -> Dict[str, Union[bool, int, List[Any]]]: + ) -> dict[str, Union[bool, int, list[Any]]]: proposal = { "proposal_id": "0xCAFEF00D", "amount_voted": uint64(10), @@ -236,11 +236,11 @@ async def dao_parse_proposal( self, wallet_id: int, proposal_id: str, - ) -> Dict[str, Union[bool, Dict[str, Any]]]: + ) -> dict[str, Union[bool, dict[str, Any]]]: if proposal_id == "0xCAFEF00D": puzhash = bytes32(b"1" * 32).hex() asset_id = bytes32(b"2" * 32).hex() - proposal_details: Dict[str, Any] = { + proposal_details: dict[str, Any] = { "proposal_type": "s", "xch_conditions": [{"puzzle_hash": puzhash, "amount": 100}], "asset_conditions": [ @@ -305,13 +305,13 @@ async def dao_create_proposal( wallet_id: int, proposal_type: str, tx_config: TXConfig, - additions: Optional[List[Dict[str, Any]]] = None, + additions: Optional[list[dict[str, Any]]] = None, amount: Optional[uint64] = None, inner_address: Optional[str] = None, asset_id: Optional[str] = None, cat_target_address: Optional[str] = None, vote_amount: Optional[int] = None, - new_dao_rules: Optional[Dict[str, uint64]] = None, + new_dao_rules: Optional[dict[str, uint64]] = None, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, push: bool = True, @@ -319,7 +319,7 @@ async def dao_create_proposal( ) -> DAOCreateProposalResponse: return DAOCreateProposalResponse([STD_UTX], [STD_TX], bytes32([0] * 32), STD_TX.name, STD_TX) - async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, 
int]]]: + async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> list[dict[str, Union[str, int]]]: return [{"id": 1, "type": 0}, {"id": 2, "type": 14}] @override @@ -487,7 +487,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: run_cli_command_and_assert(capsys, root_dir, mint_args, proposal_asserts) -def test_dao_cats(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_dao_cats(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -519,7 +519,7 @@ async def dao_exit_lockup( self, wallet_id: int, tx_config: TXConfig, - coins: Optional[List[Dict[str, Union[str, int]]]] = None, + coins: Optional[list[dict[str, Union[str, int]]]] = None, fee: uint64 = uint64(0), reuse_puzhash: Optional[bool] = None, push: bool = True, diff --git a/chia/_tests/cmds/wallet/test_did.py b/chia/_tests/cmds/wallet/test_did.py index 04a433028a72..a26d797ee53b 100644 --- a/chia/_tests/cmds/wallet/test_did.py +++ b/chia/_tests/cmds/wallet/test_did.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Dict, List, Optional, Tuple, Union +from typing import Optional, Union from chia_rs import G2Element @@ -22,7 +22,7 @@ # DID Commands -def test_did_create(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_create(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -33,11 +33,11 @@ async def create_new_did_wallet( tx_config: TXConfig, fee: int = 0, name: Optional[str] = "DID Wallet", - backup_ids: Optional[List[str]] = None, + backup_ids: Optional[list[str]] = None, required_num: int = 0, push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Dict[str, Union[str, int]]: + ) -> dict[str, Union[str, int]]: if backup_ids is None: backup_ids = [] self.add_to_log( @@ -74,7 +74,7 @@ async def create_new_did_wallet( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -97,12 +97,12 @@ def test_did_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcCli test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_set_name(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_set_name(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class DidSetNameRpcClient(TestWalletRpcClient): - async def did_set_wallet_name(self, wallet_id: int, name: str) -> Dict[str, Union[str, int]]: + async def did_set_wallet_name(self, wallet_id: int, name: str) -> dict[str, Union[str, int]]: self.add_to_log("did_set_wallet_name", (wallet_id, name)) return {} @@ -120,12 +120,12 @@ async def did_set_wallet_name(self, wallet_id: int, name: str) -> Dict[str, Unio test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_get_did(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_get_did(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = 
get_test_cli_clients # set RPC Client class DidGetDidRpcClient(TestWalletRpcClient): - async def get_did_id(self, wallet_id: int) -> Dict[str, str]: + async def get_did_id(self, wallet_id: int) -> dict[str, str]: self.add_to_log("get_did_id", (wallet_id,)) return {"my_did": encode_puzzle_hash(get_bytes32(1), "did:chia:"), "coin_id": get_bytes32(2).hex()} @@ -143,12 +143,12 @@ async def get_did_id(self, wallet_id: int) -> Dict[str, str]: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_get_details(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_get_details(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class DidGetDetailsRpcClient(TestWalletRpcClient): - async def get_did_info(self, coin_id: str, latest: bool) -> Dict[str, object]: + async def get_did_info(self, coin_id: str, latest: bool) -> dict[str, object]: self.add_to_log("get_did_info", (coin_id, latest)) response = { "did_id": encode_puzzle_hash(get_bytes32(2), "did:chia:"), @@ -191,7 +191,7 @@ async def get_did_info(self, coin_id: str, latest: bool) -> Dict[str, object]: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_update_metadata(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_update_metadata(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -199,7 +199,7 @@ class DidUpdateMetadataRpcClient(TestWalletRpcClient): async def update_did_metadata( self, wallet_id: int, - metadata: Dict[str, object], + metadata: dict[str, object], tx_config: TXConfig, push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), @@ -237,7 +237,7 @@ async def update_did_metadata( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_find_lost(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_find_lost(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -246,9 +246,9 @@ async def find_lost_did( self, coin_id: str, recovery_list_hash: Optional[str], - metadata: Optional[Dict[str, object]], + metadata: Optional[dict[str, object]], num_verification: Optional[int], - ) -> Dict[str, Union[bool, str]]: + ) -> dict[str, Union[bool, str]]: self.add_to_log("find_lost_did", (coin_id, recovery_list_hash, metadata, num_verification)) return {"success": True, "latest_coin_id": get_bytes32(2).hex()} @@ -275,7 +275,7 @@ async def find_lost_did( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_message_spend(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_did_message_spend(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -284,7 +284,7 @@ async def did_message_spend( self, wallet_id: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...], + extra_conditions: tuple[Condition, ...], push: bool, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DIDMessageSpendResponse: @@ -332,7 +332,7 @@ async def did_message_spend( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_did_transfer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def 
test_did_transfer(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index 84b5a2da786e..d5b12e593067 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, List, Optional, Tuple +from typing import Any, Optional from chia_rs import G2Element @@ -27,7 +27,7 @@ # NFT Commands -def test_nft_create(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_create(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -49,7 +49,7 @@ async def create_new_nft_wallet(self, did_id: str, name: Optional[str] = None) - test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -73,7 +73,7 @@ def test_nft_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcCli test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_mint(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_mint(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -88,11 +88,11 @@ async def mint_nft( royalty_address: Optional[str], target_address: Optional[str], hash: str, - uris: List[str], + uris: list[str], meta_hash: str = "", - meta_uris: Optional[List[str]] = None, + meta_uris: Optional[list[str]] = None, license_hash: str = "", - license_uris: Optional[List[str]] = None, + license_uris: Optional[list[str]] = None, edition_total: uint8 = uint8(1), edition_number: uint8 = uint8(1), fee: uint64 = uint64(0), @@ -190,7 +190,7 @@ async def mint_nft( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_add_uri(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_add_uri(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -250,7 +250,7 @@ async def add_uri_to_nft( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_transfer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_transfer(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -316,7 +316,7 @@ async def transfer_nft( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_list(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_list(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -378,7 +378,7 @@ async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) - test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_set_did(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_set_did(capsys: 
object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -442,7 +442,7 @@ async def set_nft_did( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_nft_get_info(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_nft_get_info(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client diff --git a/chia/_tests/cmds/wallet/test_notifications.py b/chia/_tests/cmds/wallet/test_notifications.py index d979574c5b22..538015f0c1e6 100644 --- a/chia/_tests/cmds/wallet/test_notifications.py +++ b/chia/_tests/cmds/wallet/test_notifications.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import List, Optional, Tuple, cast +from typing import Optional, cast from chia._tests.cmds.cmd_test_utils import TestRpcClients, TestWalletRpcClient, logType, run_cli_command_and_assert from chia._tests.cmds.wallet.test_consts import FINGERPRINT, FINGERPRINT_ARG, get_bytes32 @@ -18,7 +18,7 @@ # Notifications Commands -def test_notifications_send(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_notifications_send(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -71,7 +71,7 @@ def __init__(self, name: str) -> None: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_notifications_get(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_notifications_get(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -105,12 +105,12 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_notifications_delete(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_notifications_delete(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class NotificationsDeleteRpcClient(TestWalletRpcClient): - async def delete_notifications(self, ids: Optional[List[bytes32]] = None) -> bool: + async def delete_notifications(self, ids: Optional[list[bytes32]] = None) -> bool: self.add_to_log("delete_notifications", (ids,)) return True diff --git a/chia/_tests/cmds/wallet/test_tx_decorators.py b/chia/_tests/cmds/wallet/test_tx_decorators.py index 588a966e707e..c69baaa321f5 100644 --- a/chia/_tests/cmds/wallet/test_tx_decorators.py +++ b/chia/_tests/cmds/wallet/test_tx_decorators.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, List +from typing import Any import click from click.testing import CliRunner @@ -13,7 +13,7 @@ def test_tx_out_cmd() -> None: @click.command() @tx_out_cmd() - def test_cmd(**kwargs: Any) -> List[TransactionRecord]: + def test_cmd(**kwargs: Any) -> list[TransactionRecord]: with open("./temp.push", "w") as file: file.write(str(kwargs["push"])) return [STD_TX, STD_TX] diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index 8f82ff05e768..a475d40f3d0e 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from 
typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from chia_rs import Coin @@ -22,7 +22,7 @@ # VC Commands -def test_vcs_mint(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_mint(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -86,12 +86,12 @@ async def vc_mint( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_get(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_get(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class VcsGetRpcClient(TestWalletRpcClient): - async def vc_get_list(self, start: int = 0, count: int = 50) -> Tuple[List[VCRecord], Dict[str, Any]]: + async def vc_get_list(self, start: int = 0, count: int = 50) -> tuple[list[VCRecord], dict[str, Any]]: class FakeVC: def __init__(self) -> None: self.launcher_id = get_bytes32(3) @@ -122,7 +122,7 @@ def __getattr__(self, item: str) -> Any: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_update_proofs(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_update_proofs(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -187,12 +187,12 @@ async def vc_spend( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_add_proof_reveal(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_add_proof_reveal(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class VcsAddProofRevealRpcClient(TestWalletRpcClient): - async def vc_add_proofs(self, proofs: Dict[str, Any]) -> None: + async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: self.add_to_log("vc_add_proofs", (proofs,)) return None @@ -211,12 +211,12 @@ async def vc_add_proofs(self, proofs: Dict[str, Any]) -> None: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_get_proofs_for_root(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_get_proofs_for_root(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class VcsGetProofsForRootRpcClient(TestWalletRpcClient): - async def vc_get_proofs_for_root(self, root: bytes32) -> Dict[str, Any]: + async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: self.add_to_log("vc_get_proofs_for_root", (root,)) return {"test_proof": "1", "test_proof2": "1"} @@ -232,7 +232,7 @@ async def vc_get_proofs_for_root(self, root: bytes32) -> Dict[str, Any]: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_revoke(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_revoke(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -303,7 +303,7 @@ async def vc_revoke( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_vcs_approve_r_cats(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_vcs_approve_r_cats(capsys: object, get_test_cli_clients: 
tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -316,7 +316,7 @@ async def crcat_approve_pending( fee: uint64 = uint64(0), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: self.add_to_log( "crcat_approve_pending", ( diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 76c1f35e287c..64407d2efe88 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -3,7 +3,7 @@ import datetime import os from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union import importlib_resources import pytest @@ -57,7 +57,7 @@ test_condition_valid_times: ConditionValidTimes = ConditionValidTimes(min_time=uint64(100), max_time=uint64(150)) -def test_get_transaction(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_get_transaction(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter @@ -97,7 +97,7 @@ def test_get_transaction(capsys: object, get_test_cli_clients: Tuple[TestRpcClie test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_get_transactions(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_get_transactions(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -112,7 +112,7 @@ async def get_transactions( to_address: Optional[str] = None, type_filter: Optional[TransactionTypeFilter] = None, confirmed: Optional[bool] = None, - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: self.add_to_log( "get_transactions", (wallet_id, start, end, sort_key, reverse, to_address, type_filter, confirmed) ) @@ -142,7 +142,7 @@ async def get_transactions( return l_tx_rec - async def get_coin_records(self, request: GetCoinRecords) -> Dict[str, Any]: + async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: self.add_to_log("get_coin_records", (request,)) return { "coin_records": [{"metadata": {"time_lock": 12345678}}], @@ -200,14 +200,14 @@ async def get_coin_records(self, request: GetCoinRecords) -> Dict[str, Any]: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_show(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_show(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class ShowRpcClient(TestWalletRpcClient): - async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Union[str, int]]]: + async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> list[dict[str, Union[str, int]]]: self.add_to_log("get_wallets", (wallet_type,)) - wallet_list: List[Dict[str, Union[str, int]]] = [ + wallet_list: list[dict[str, Union[str, int]]] = [ {"data": "", "id": 1, "name": "Chia Wallet", "type": WalletType.STANDARD_WALLET}, { "data": "dc59bcd60ce5fc9c93a5d3b11875486b03efb53a53da61e453f5cf61a774686001ff02ffff01ff02ffff03ff2f" @@ -232,7 +232,7 @@ async def get_height_info(self) -> GetHeightInfoResponse: self.add_to_log("get_height_info", ()) return 
GetHeightInfoResponse(uint32(10)) - async def get_wallet_balance(self, wallet_id: int) -> Dict[str, uint64]: + async def get_wallet_balance(self, wallet_id: int) -> dict[str, uint64]: self.add_to_log("get_wallet_balance", (wallet_id,)) if wallet_id == 1: amount = uint64(1000000000) @@ -252,7 +252,7 @@ async def get_nft_wallet_did(self, wallet_id: uint8) -> dict[str, Optional[str]] async def get_connections( self, node_type: Optional[NodeType] = None - ) -> List[Dict[str, Union[str, int, float, bytes32]]]: + ) -> list[dict[str, Union[str, int, float, bytes32]]]: self.add_to_log("get_connections", (node_type,)) return [ { @@ -301,7 +301,7 @@ async def get_connections( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_send(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_send(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -313,8 +313,8 @@ async def send_transaction( address: str, tx_config: TXConfig, fee: uint64 = uint64(0), - memos: Optional[List[str]] = None, - puzzle_decorator_override: Optional[List[Dict[str, Union[str, int, bool]]]] = None, + memos: Optional[list[str]] = None, + puzzle_decorator_override: Optional[list[dict[str, Union[str, int, bool]]]] = None, push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> SendTransactionResponse: @@ -351,10 +351,10 @@ async def cat_spend( amount: Optional[uint64] = None, inner_address: Optional[str] = None, fee: uint64 = uint64(0), - memos: Optional[List[str]] = None, - additions: Optional[List[Dict[str, Any]]] = None, - removals: Optional[List[Coin]] = None, - cat_discrepancy: Optional[Tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) + memos: Optional[list[str]] = None, + additions: Optional[list[dict[str, Any]]] = None, + removals: Optional[list[Coin]] = None, + cat_discrepancy: Optional[tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> CATSpendResponse: @@ -468,7 +468,7 @@ async def cat_spend( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_get_address(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_get_address(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -499,19 +499,19 @@ async def get_next_address(self, wallet_id: int, new_address: bool) -> str: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_clawback(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_clawback(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class ClawbackWalletRpcClient(TestWalletRpcClient): async def spend_clawback_coins( self, - coin_ids: List[bytes32], + coin_ids: list[bytes32], fee: int = 0, force: bool = False, push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Dict[str, Any]: + ) -> dict[str, Any]: self.add_to_log("spend_clawback_coins", (coin_ids, fee, force, push, timelock_info)) tx_hex_list = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] return { @@ -551,7 +551,7 @@ async def spend_clawback_coins( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def 
test_del_unconfirmed_tx(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_del_unconfirmed_tx(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -577,7 +577,7 @@ async def delete_unconfirmed_transactions(self, wallet_id: int) -> None: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_get_derivation_index(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_get_derivation_index(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -601,7 +601,7 @@ async def get_current_derivation_index(self) -> str: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -624,7 +624,7 @@ def test_sign_message(capsys: object, get_test_cli_clients: Tuple[TestRpcClients test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_update_derivation_index(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_update_derivation_index(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -644,12 +644,12 @@ async def extend_derivation_index(self, index: int) -> str: test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_add_token(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_add_token(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class AddTokenRpcClient(TestWalletRpcClient): - async def create_wallet_for_existing_cat(self, asset_id: bytes) -> Dict[str, int]: + async def create_wallet_for_existing_cat(self, asset_id: bytes) -> dict[str, int]: self.add_to_log("create_wallet_for_existing_cat", (asset_id,)) return {"wallet_id": 3} @@ -675,7 +675,7 @@ async def set_cat_name(self, wallet_id: int, name: str) -> None: def test_make_offer_bad_filename( - capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path], tmp_path: Path + capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path], tmp_path: Path ) -> None: _, root_dir = get_test_cli_clients @@ -727,17 +727,17 @@ def test_make_offer_bad_filename( run_cli_command_and_assert(capsys, root_dir, command_args_unwritable, [""]) -def test_make_offer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path], tmp_path: Path) -> None: +def test_make_offer(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path], tmp_path: Path) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client class MakeOfferRpcClient(TestWalletRpcClient): async def create_offer_for_ids( self, - offer_dict: Dict[uint32, int], + offer_dict: dict[uint32, int], tx_config: TXConfig, - driver_dict: Optional[Dict[str, Any]] = None, - solver: Optional[Dict[str, Any]] = None, + driver_dict: Optional[dict[str, Any]] = None, + solver: Optional[dict[str, Any]] = None, fee: uint64 = uint64(0), validate_only: bool = False, timelock_info: ConditionValidTimes = ConditionValidTimes(), @@ -857,7 +857,7 @@ async def create_offer_for_ids( 
test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_get_offers(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_get_offers(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -872,7 +872,7 @@ async def get_all_offers( exclude_my_offers: bool = False, exclude_taken_offers: bool = False, include_completed: bool = False, - ) -> List[TradeRecord]: + ) -> list[TradeRecord]: self.add_to_log( "get_all_offers", ( @@ -886,7 +886,7 @@ async def get_all_offers( include_completed, ), ) - records: List[TradeRecord] = [] + records: list[TradeRecord] = [] for i in reversed(range(start, end - 1)): # reversed to match the sort order trade_offer = TradeRecord( confirmed_at_index=uint32(0), @@ -964,7 +964,7 @@ async def get_all_offers( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_take_offer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_take_offer(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client @@ -973,7 +973,7 @@ async def take_offer( self, offer: Offer, tx_config: TXConfig, - solver: Optional[Dict[str, Any]] = None, + solver: Optional[dict[str, Any]] = None, fee: uint64 = uint64(0), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), @@ -1049,7 +1049,7 @@ async def take_offer( test_rpc_clients.wallet_rpc_client.check_log(expected_calls) -def test_cancel_offer(capsys: object, get_test_cli_clients: Tuple[TestRpcClients, Path]) -> None: +def test_cancel_offer(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client diff --git a/chia/_tests/cmds/wallet/test_wallet_check.py b/chia/_tests/cmds/wallet/test_wallet_check.py index d59c32ea8bde..f7c0d2068046 100644 --- a/chia/_tests/cmds/wallet/test_wallet_check.py +++ b/chia/_tests/cmds/wallet/test_wallet_check.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, List - import pytest from chia.cmds.check_wallet_db import DerivationPath, Wallet, check_addresses_used_contiguous, check_for_gaps @@ -68,7 +66,7 @@ def make_dp( return DerivationPath(row, fields) -def used_list_to_dp_list(used_list: List[int], wallet_id: int) -> List[DerivationPath]: +def used_list_to_dp_list(used_list: list[int], wallet_id: int) -> list[DerivationPath]: dps = [] for index, used in enumerate(used_list): @@ -78,14 +76,14 @@ def used_list_to_dp_list(used_list: List[int], wallet_id: int) -> List[Derivatio def test_check_addresses_used_contiguous() -> None: - ok_used_lists: List[List[int]] = [ + ok_used_lists: list[list[int]] = [ [], [1], [0], [1, 0], ] - bad_used_lists: List[List[int]] = [ + bad_used_lists: list[list[int]] = [ [0, 1], ] @@ -103,9 +101,9 @@ def test_check_addresses_used_contiguous() -> None: def test_check_addresses_used_contiguous_multiple_wallets() -> None: - multi_used_lists: List[Dict[int, List[int]]] = [{0: [1, 1], 1: [1, 1]}, {0: [0, 0], 1: [1, 1]}] + multi_used_lists: list[dict[int, list[int]]] = [{0: [1, 1], 1: [1, 1]}, {0: [0, 0], 1: [1, 1]}] for entry in multi_used_lists: - dp_list: List[DerivationPath] = [] + dp_list: list[DerivationPath] = [] for wallet_id, used_list in entry.items(): dp_list.extend(used_list_to_dp_list(used_list, wallet_id)) assert [] == check_addresses_used_contiguous(dp_list) diff --git 
a/chia/_tests/conftest.py b/chia/_tests/conftest.py index 03db3411f14b..eb6d5f2c8770 100644 --- a/chia/_tests/conftest.py +++ b/chia/_tests/conftest.py @@ -13,8 +13,9 @@ import random import sysconfig import tempfile +from collections.abc import AsyncIterator, Iterator from contextlib import AsyncExitStack -from typing import Any, AsyncIterator, Callable, Dict, Iterator, List, Tuple, Union +from typing import Any, Callable, Union import aiohttp import pytest @@ -550,12 +551,12 @@ def time_out_assert_repeat_fixture(request: SubRequest) -> int: globals()[time_out_assert_repeat_fixture.__name__] = time_out_assert_repeat_fixture -def pytest_collection_modifyitems(session, config: pytest.Config, items: List[pytest.Function]): +def pytest_collection_modifyitems(session, config: pytest.Config, items: list[pytest.Function]): # https://github.com/pytest-dev/pytest/issues/3730#issuecomment-567142496 removed = [] kept = [] - all_error_lines: List[str] = [] - limit_consensus_modes_problems: List[str] = [] + all_error_lines: list[str] = [] + limit_consensus_modes_problems: list[str] = [] for item in items: limit_consensus_modes_marker = item.get_closest_marker("limit_consensus_modes") if limit_consensus_modes_marker is not None: @@ -583,7 +584,7 @@ def pytest_collection_modifyitems(session, config: pytest.Config, items: List[py all_error_lines.append("@pytest.mark.limit_consensus_modes used without consensus_mode:") all_error_lines.extend(f" {line}" for line in limit_consensus_modes_problems) - benchmark_problems: List[str] = [] + benchmark_problems: list[str] = [] for item in items: existing_benchmark_mark = item.get_closest_marker("benchmark") if existing_benchmark_mark is not None: @@ -619,7 +620,7 @@ async def two_nodes(db_version: int, self_hostname, blockchain_constants: Consen @pytest.fixture(scope="function") async def setup_two_nodes_fixture( db_version: int, blockchain_constants: ConsensusConstants -) -> AsyncIterator[Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools]]: +) -> AsyncIterator[tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools]]: async with setup_simulators_and_wallets(2, 0, blockchain_constants, db_version=db_version) as new: yield make_old_setup_simulators_and_wallets(new=new) @@ -681,7 +682,7 @@ async def wallet_node_100_pk(blockchain_constants: ConsensusConstants): @pytest.fixture(scope="function") async def simulator_and_wallet( blockchain_constants: ConsensusConstants, -) -> AsyncIterator[Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools]]: +) -> AsyncIterator[tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools]]: async with setup_simulators_and_wallets(1, 1, blockchain_constants) as new: yield make_old_setup_simulators_and_wallets(new=new) @@ -698,7 +699,7 @@ async def two_wallet_nodes(request, blockchain_constants: ConsensusConstants): @pytest.fixture(scope="function") async def two_wallet_nodes_services( blockchain_constants: ConsensusConstants, -) -> AsyncIterator[Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools]]: +) -> AsyncIterator[tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools]]: async with setup_simulators_and_wallets_service(1, 2, blockchain_constants) as _: yield _ @@ -781,7 +782,7 @@ async def three_nodes_two_wallets(blockchain_constants: ConsensusConstants): @pytest.fixture(scope="function") async def one_node( blockchain_constants: ConsensusConstants, -) -> AsyncIterator[Tuple[List[Service], 
List[FullNodeSimulator], BlockTools]]: +) -> AsyncIterator[tuple[list[Service], list[FullNodeSimulator], BlockTools]]: async with setup_simulators_and_wallets_service(1, 0, blockchain_constants) as _: yield _ @@ -789,7 +790,7 @@ async def one_node( @pytest.fixture(scope="function") async def one_node_one_block( blockchain_constants: ConsensusConstants, -) -> AsyncIterator[Tuple[Union[FullNodeAPI, FullNodeSimulator], ChiaServer, BlockTools]]: +) -> AsyncIterator[tuple[Union[FullNodeAPI, FullNodeSimulator], ChiaServer, BlockTools]]: async with setup_simulators_and_wallets(1, 0, blockchain_constants) as new: (nodes, _, bt) = make_old_setup_simulators_and_wallets(new=new) full_node_1 = nodes[0] @@ -849,7 +850,7 @@ async def farmer_one_harvester_simulator_wallet( tmp_path: Path, blockchain_constants: ConsensusConstants, ) -> AsyncIterator[ - Tuple[ + tuple[ HarvesterService, FarmerService, SimulatorFullNodeService, @@ -866,7 +867,7 @@ async def farmer_one_harvester_simulator_wallet( yield harvester_services[0], farmer_service, nodes[0], wallets[0], bt -FarmerOneHarvester = Tuple[List[HarvesterService], FarmerService, BlockTools] +FarmerOneHarvester = tuple[list[HarvesterService], FarmerService, BlockTools] @pytest.fixture(scope="function") @@ -878,7 +879,7 @@ async def farmer_one_harvester(tmp_path: Path, get_b_tools: BlockTools) -> Async @pytest.fixture(scope="function") async def farmer_one_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[Tuple[List[Service], Service]]: +) -> AsyncIterator[tuple[list[Service], Service]]: async with setup_farmer_multi_harvester(get_b_tools, 1, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ @@ -886,7 +887,7 @@ async def farmer_one_harvester_not_started( @pytest.fixture(scope="function") async def farmer_two_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[Tuple[List[Service], Service]]: +) -> AsyncIterator[tuple[list[Service], Service]]: async with setup_farmer_multi_harvester(get_b_tools, 2, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ @@ -894,7 +895,7 @@ async def farmer_two_harvester_not_started( @pytest.fixture(scope="function") async def farmer_three_harvester_not_started( tmp_path: Path, get_b_tools: BlockTools -) -> AsyncIterator[Tuple[List[Service], Service]]: +) -> AsyncIterator[tuple[list[Service], Service]]: async with setup_farmer_multi_harvester(get_b_tools, 3, tmp_path, get_b_tools.constants, start_services=False) as _: yield _ @@ -934,7 +935,7 @@ async def get_b_tools(get_temp_keyring): @pytest.fixture(scope="function") async def daemon_connection_and_temp_keychain( get_b_tools: BlockTools, -) -> AsyncIterator[Tuple[aiohttp.ClientWebSocketResponse, Keychain]]: +) -> AsyncIterator[tuple[aiohttp.ClientWebSocketResponse, Keychain]]: async with setup_daemon(btools=get_b_tools) as daemon: keychain = daemon.keychain_server._default_keychain async with aiohttp.ClientSession() as session: @@ -1128,13 +1129,13 @@ def root_path_populated_with_config(tmp_chia_root) -> Path: @pytest.fixture(scope="function") -def config(root_path_populated_with_config: Path) -> Dict[str, Any]: +def config(root_path_populated_with_config: Path) -> dict[str, Any]: with lock_and_load_config(root_path_populated_with_config, "config.yaml") as config: return config @pytest.fixture(scope="function") -def config_with_address_prefix(root_path_populated_with_config: Path, prefix: str) -> Dict[str, Any]: +def config_with_address_prefix(root_path_populated_with_config: Path, 
prefix: str) -> dict[str, Any]: with lock_and_load_config(root_path_populated_with_config, "config.yaml") as config: if prefix is not None: config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] = prefix @@ -1174,12 +1175,12 @@ async def simulation(bt, get_b_tools): yield full_system, get_b_tools -HarvesterFarmerEnvironment = Tuple[FarmerService, FarmerRpcClient, HarvesterService, HarvesterRpcClient, BlockTools] +HarvesterFarmerEnvironment = tuple[FarmerService, FarmerRpcClient, HarvesterService, HarvesterRpcClient, BlockTools] @pytest.fixture(scope="function") async def harvester_farmer_environment( - farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools], + farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools], self_hostname: str, ) -> AsyncIterator[HarvesterFarmerEnvironment]: harvesters, farmer_service, bt = farmer_one_harvester @@ -1229,7 +1230,7 @@ def populated_temp_file_keyring_fixture() -> Iterator[TempKeyring]: async def farmer_harvester_2_simulators_zero_bits_plot_filter( tmp_path: Path, get_temp_keyring: Keychain ) -> AsyncIterator[ - Tuple[ + tuple[ FarmerService, HarvesterService, Union[FullNodeService, SimulatorFullNodeService], @@ -1248,7 +1249,7 @@ async def farmer_harvester_2_simulators_zero_bits_plot_filter( keychain=get_temp_keyring, ) - config_overrides: Dict[str, int] = {"full_node.max_sync_wait": 0} + config_overrides: dict[str, int] = {"full_node.max_sync_wait": 0} bts = [ await create_block_tools_async( @@ -1262,7 +1263,7 @@ async def farmer_harvester_2_simulators_zero_bits_plot_filter( for _ in range(2) ] - simulators: List[SimulatorFullNodeService] = [ + simulators: list[SimulatorFullNodeService] = [ await async_exit_stack.enter_async_context( # Passing simulator=True gets us this type guaranteed setup_full_node( # type: ignore[arg-type] diff --git a/chia/_tests/connection_utils.py b/chia/_tests/connection_utils.py index 1ae27a1a0193..ae124b218323 100644 --- a/chia/_tests/connection_utils.py +++ b/chia/_tests/connection_utils.py @@ -3,7 +3,6 @@ import asyncio import logging from pathlib import Path -from typing import List, Set, Tuple import aiohttp from cryptography import x509 @@ -44,8 +43,8 @@ async def add_dummy_connection( dummy_port: int, type: NodeType = NodeType.FULL_NODE, *, - additional_capabilities: List[Tuple[uint16, str]] = [], -) -> Tuple[asyncio.Queue, bytes32]: + additional_capabilities: list[tuple[uint16, str]] = [], +) -> tuple[asyncio.Queue, bytes32]: wsc, peer_id = await add_dummy_connection_wsc( server, self_hostname, dummy_port, type, additional_capabilities=additional_capabilities ) @@ -58,15 +57,15 @@ async def add_dummy_connection_wsc( self_hostname: str, dummy_port: int, type: NodeType = NodeType.FULL_NODE, - additional_capabilities: List[Tuple[uint16, str]] = [], -) -> Tuple[WSChiaConnection, bytes32]: + additional_capabilities: list[tuple[uint16, str]] = [], +) -> tuple[WSChiaConnection, bytes32]: timeout = aiohttp.ClientTimeout(total=10) session = aiohttp.ClientSession(timeout=timeout) config = load_config(server.root_path, "config.yaml") ca_crt_path: Path ca_key_path: Path - authenticated_client_types: Set[NodeType] = {NodeType.HARVESTER} + authenticated_client_types: set[NodeType] = {NodeType.HARVESTER} if type in authenticated_client_types: private_ca_crt_path, private_ca_key_path = private_ssl_ca_paths(server.root_path, config) ca_crt_path = private_ca_crt_path diff --git a/chia/_tests/core/cmds/test_keys.py b/chia/_tests/core/cmds/test_keys.py index 
7629b1924bb9..56b0e4624959 100644 --- a/chia/_tests/core/cmds/test_keys.py +++ b/chia/_tests/core/cmds/test_keys.py @@ -4,7 +4,7 @@ import os import re from pathlib import Path -from typing import Dict, List, Optional +from typing import Optional import pytest from click.testing import CliRunner, Result @@ -105,7 +105,7 @@ def test_generate_with_new_config(self, tmp_path, empty_keyring): assert len(address_matches) > 1 address = address_matches[0] - config: Dict = load_config(tmp_path, "config.yaml") + config: dict = load_config(tmp_path, "config.yaml") assert config["farmer"]["xch_target_address"] == address assert config["pool"]["xch_target_address"] == address @@ -153,7 +153,7 @@ def test_generate_with_existing_config(self, tmp_path, empty_keyring): assert len(address_matches) > 1 address = address_matches[0] - existing_config: Dict = load_config(tmp_path, "config.yaml") + existing_config: dict = load_config(tmp_path, "config.yaml") assert existing_config["farmer"]["xch_target_address"] == address assert existing_config["pool"]["xch_target_address"] == address @@ -177,7 +177,7 @@ def test_generate_with_existing_config(self, tmp_path, empty_keyring): assert len(keychain.get_all_private_keys()) == 2 # Verify that the config's xch_target_address entries have not changed - config: Dict = load_config(tmp_path, "config.yaml") + config: dict = load_config(tmp_path, "config.yaml") assert config["farmer"]["xch_target_address"] == existing_config["farmer"]["xch_target_address"] assert config["pool"]["xch_target_address"] == existing_config["pool"]["xch_target_address"] @@ -200,7 +200,7 @@ def test_generate_with_existing_config(self, tmp_path, empty_keyring): ], ) def test_generate_and_add_label_parameter( - self, cmd_params: List[str], label: Optional[str], input_str: Optional[str], tmp_path, empty_keyring + self, cmd_params: list[str], label: Optional[str], input_str: Optional[str], tmp_path, empty_keyring ): keychain = empty_keyring keys_root_path = keychain.keyring_wrapper.keys_root_path diff --git a/chia/_tests/core/cmds/test_wallet.py b/chia/_tests/core/cmds/test_wallet.py index 3cdf0fbf8cc3..29f6538b901c 100644 --- a/chia/_tests/core/cmds/test_wallet.py +++ b/chia/_tests/core/cmds/test_wallet.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional import pytest @@ -12,14 +12,14 @@ TEST_CRUNCHBERRIES_ASSET_ID = "1000000000000000000000000000000000000000000000000000000000000002" TEST_UNICORNTEARS_ASSET_ID = "1000000000000000000000000000000000000000000000000000000000000003" -TEST_ASSET_ID_NAME_MAPPING: Dict[bytes32, Tuple[uint32, str]] = { +TEST_ASSET_ID_NAME_MAPPING: dict[bytes32, tuple[uint32, str]] = { bytes32.from_hexstr(TEST_DUCKSAUCE_ASSET_ID): (uint32(2), "DuckSauce"), bytes32.from_hexstr(TEST_CRUNCHBERRIES_ASSET_ID): (uint32(3), "CrunchBerries"), bytes32.from_hexstr(TEST_UNICORNTEARS_ASSET_ID): (uint32(4), "UnicornTears"), } -async def cat_name_resolver(asset_id: bytes32) -> Optional[Tuple[Optional[uint32], str]]: +async def cat_name_resolver(asset_id: bytes32) -> Optional[tuple[Optional[uint32], str]]: return TEST_ASSET_ID_NAME_MAPPING.get(asset_id) diff --git a/chia/_tests/core/consensus/test_block_creation.py b/chia/_tests/core/consensus/test_block_creation.py index f59ab3dbdd36..6328f0633c60 100644 --- a/chia/_tests/core/consensus/test_block_creation.py +++ b/chia/_tests/core/consensus/test_block_creation.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from 
chia._tests.core.make_block_generator import make_block_generator @@ -15,9 +13,9 @@ @pytest.mark.parametrize("add_amount", [[0], [1, 2, 3], []]) @pytest.mark.parametrize("rem_amount", [[0], [1, 2, 3], []]) -def test_compute_block_fee(add_amount: List[int], rem_amount: List[int]) -> None: - additions: List[Coin] = [Coin(bytes32.random(), bytes32.random(), uint64(amt)) for amt in add_amount] - removals: List[Coin] = [Coin(bytes32.random(), bytes32.random(), uint64(amt)) for amt in rem_amount] +def test_compute_block_fee(add_amount: list[int], rem_amount: list[int]) -> None: + additions: list[Coin] = [Coin(bytes32.random(), bytes32.random(), uint64(amt)) for amt in add_amount] + removals: list[Coin] = [Coin(bytes32.random(), bytes32.random(), uint64(amt)) for amt in rem_amount] # the fee is the left-overs from the removals (spent) coins after deducting # the newly created coins (additions) diff --git a/chia/_tests/core/custom_types/test_coin.py b/chia/_tests/core/custom_types/test_coin.py index 1db447ddd901..dc5d7c447dd1 100644 --- a/chia/_tests/core/custom_types/test_coin.py +++ b/chia/_tests/core/custom_types/test_coin.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chia.types.blockchain_format.coin import Coin @@ -65,7 +63,7 @@ def test_serialization(): (0x7FFFFFFFFFFFFFFF, [0x7F, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]), ], ) -def test_name(amount: int, clvm: List[int]) -> None: +def test_name(amount: int, clvm: list[int]) -> None: H1 = bytes32(b"a" * 32) H2 = bytes32(b"b" * 32) diff --git a/chia/_tests/core/custom_types/test_spend_bundle.py b/chia/_tests/core/custom_types/test_spend_bundle.py index 3dc40111257c..533385fba2d7 100644 --- a/chia/_tests/core/custom_types/test_spend_bundle.py +++ b/chia/_tests/core/custom_types/test_spend_bundle.py @@ -2,7 +2,6 @@ import random import unittest -from typing import List, Tuple import pytest from chia_rs import G2Element @@ -36,9 +35,9 @@ def rand_hash(rng: random.Random) -> bytes32: return bytes32(ret) -def create_spends(num: int) -> Tuple[List[CoinSpend], List[Coin]]: - spends: List[CoinSpend] = [] - create_coin: List[Coin] = [] +def create_spends(num: int) -> tuple[list[CoinSpend], list[Coin]]: + spends: list[CoinSpend] = [] + create_coin: list[Coin] = [] rng = random.Random() puzzle = Program.to(1) diff --git a/chia/_tests/core/daemon/test_daemon.py b/chia/_tests/core/daemon/test_daemon.py index 71f20e8a3b4c..30923f5aad02 100644 --- a/chia/_tests/core/daemon/test_daemon.py +++ b/chia/_tests/core/daemon/test_daemon.py @@ -4,7 +4,7 @@ import json from dataclasses import dataclass, field, replace from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Type, Union, cast +from typing import Any, Optional, Union, cast import aiohttp import pytest @@ -44,8 +44,8 @@ class RouteCase: route: str description: str - request: Dict[str, Any] - response: Dict[str, Any] + request: dict[str, Any] + response: dict[str, Any] marks: Marks = () @property @@ -57,9 +57,9 @@ def id(self) -> str: class RouteStatusCase: route: str description: str - request: Dict[str, Any] - response: Dict[str, Any] - status: Dict[str, Any] + request: dict[str, Any] + response: dict[str, Any] + status: dict[str, Any] marks: Marks = () @property @@ -70,8 +70,8 @@ def id(self) -> str: @dataclass class WalletAddressCase: id: str - request: Dict[str, Any] - response: Dict[str, Any] + request: dict[str, Any] + response: dict[str, Any] pubkeys_only: bool = field(default=False) marks: Marks = () @@ -79,8 +79,8 @@ 
class WalletAddressCase: @dataclass class KeysForPlotCase: id: str - request: Dict[str, Any] - response: Dict[str, Any] + request: dict[str, Any] + response: dict[str, Any] marks: Marks = () @@ -102,8 +102,8 @@ class ChiaPlottersBladebitArgsCase: def id(self) -> str: return self.case_id - def to_command_array(self) -> List[str]: - command: List[str] = ["bladebit", self.plot_type] + def to_command_array(self) -> list[str]: + command: list[str] = ["bladebit", self.plot_type] command += ["-r", str(self.threads)] command += ["-n", str(self.count)] command += ["-c", self.pool_contract] @@ -154,13 +154,13 @@ def poll(self) -> Optional[int]: @dataclass class Daemon: # Instance variables used by WebSocketServer.is_running() - services: Dict[str, Union[List[Service], Service]] - connections: Dict[str, Optional[List[Any]]] + services: dict[str, Union[list[Service], Service]] + connections: dict[str, Optional[list[Any]]] # Instance variables used by WebSocketServer.get_wallet_addresses() - net_config: Dict[str, Any] = field(default_factory=dict) + net_config: dict[str, Any] = field(default_factory=dict) - def get_command_mapping(self) -> Dict[str, Any]: + def get_command_mapping(self) -> dict[str, Any]: return { "get_routes": None, "example_one": None, @@ -171,23 +171,23 @@ def get_command_mapping(self) -> Dict[str, Any]: def is_service_running(self, service_name: str) -> bool: return WebSocketServer.is_service_running(cast(WebSocketServer, self), service_name) - async def running_services(self) -> Dict[str, Any]: + async def running_services(self) -> dict[str, Any]: return await WebSocketServer.running_services(cast(WebSocketServer, self)) - async def is_running(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def is_running(self, request: dict[str, Any]) -> dict[str, Any]: return await WebSocketServer.is_running(cast(WebSocketServer, self), request) - async def get_routes(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def get_routes(self, request: dict[str, Any]) -> dict[str, Any]: return await WebSocketServer.get_routes( cast(WebSocketServer, self), websocket=WebSocketResponse(), request=request ) - async def get_wallet_addresses(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def get_wallet_addresses(self, request: dict[str, Any]) -> dict[str, Any]: return await WebSocketServer.get_wallet_addresses( cast(WebSocketServer, self), websocket=WebSocketResponse(), request=request ) - async def get_keys_for_plotting(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def get_keys_for_plotting(self, request: dict[str, Any]) -> dict[str, Any]: return await WebSocketServer.get_keys_for_plotting( cast(WebSocketServer, self), websocket=WebSocketResponse(), request=request ) @@ -234,14 +234,14 @@ async def get_keys_for_plotting(self, request: Dict[str, Any]) -> Dict[str, Any] } -def add_private_key_response_data(fingerprint: int) -> Dict[str, object]: +def add_private_key_response_data(fingerprint: int) -> dict[str, object]: return { "success": True, "fingerprint": fingerprint, } -def fingerprint_missing_response_data(request_type: Type[object]) -> Dict[str, object]: +def fingerprint_missing_response_data(request_type: type[object]) -> dict[str, object]: return { "success": False, "error": "malformed request", @@ -249,7 +249,7 @@ def fingerprint_missing_response_data(request_type: Type[object]) -> Dict[str, o } -def fingerprint_not_found_response_data(fingerprint: int) -> Dict[str, object]: +def fingerprint_not_found_response_data(fingerprint: int) -> dict[str, object]: 
return { "success": False, "error": "key not found", @@ -259,23 +259,23 @@ def fingerprint_not_found_response_data(fingerprint: int) -> Dict[str, object]: } -def get_key_response_data(key: KeyData) -> Dict[str, object]: +def get_key_response_data(key: KeyData) -> dict[str, object]: return {"success": True, **GetKeyResponse(key=key).to_json_dict()} -def get_keys_response_data(keys: List[KeyData]) -> Dict[str, object]: +def get_keys_response_data(keys: list[KeyData]) -> dict[str, object]: return {"success": True, **GetKeysResponse(keys=keys).to_json_dict()} -def get_public_key_response_data(key: KeyData) -> Dict[str, object]: +def get_public_key_response_data(key: KeyData) -> dict[str, object]: return {"success": True, **GetPublicKeyResponse(key=key).to_json_dict()} -def get_public_keys_response_data(keys: List[KeyData]) -> Dict[str, object]: +def get_public_keys_response_data(keys: list[KeyData]) -> dict[str, object]: return {"success": True, **GetPublicKeysResponse(keys=keys).to_json_dict()} -def label_missing_response_data(request_type: Type[Any]) -> Dict[str, Any]: +def label_missing_response_data(request_type: type[Any]) -> dict[str, Any]: return { "success": False, "error": "malformed request", @@ -283,7 +283,7 @@ def label_missing_response_data(request_type: Type[Any]) -> Dict[str, Any]: } -def label_exists_response_data(fingerprint: int, label: str) -> Dict[str, Any]: +def label_exists_response_data(fingerprint: int, label: str) -> dict[str, Any]: return { "success": False, "error": "malformed request", @@ -312,7 +312,7 @@ def label_exists_response_data(fingerprint: int, label: str) -> Dict[str, Any]: def assert_response( response: aiohttp.http_websocket.WSMessage, - expected_response_data: Dict[str, Any], + expected_response_data: dict[str, Any], request_id: Optional[str] = None, ack: bool = True, command: Optional[str] = None, @@ -332,7 +332,7 @@ def assert_response( def assert_response_success_only( response: aiohttp.http_websocket.WSMessage, request_id: Optional[str] = None -) -> Dict[str, Any]: +) -> dict[str, Any]: # Expect: JSON response assert response.type == aiohttp.WSMsgType.TEXT message = json.loads(response.data.strip()) @@ -343,7 +343,7 @@ def assert_response_success_only( return message -def assert_running_services_response(response_dict: Dict[str, Any], expected_response_dict: Dict[str, Any]) -> None: +def assert_running_services_response(response_dict: dict[str, Any], expected_response_dict: dict[str, Any]) -> None: for k, v in expected_response_dict.items(): if k == "running_services": # Order of services is not guaranteed @@ -1035,7 +1035,7 @@ async def test_add_private_key_label(daemon_connection_and_temp_keychain): ws, keychain = daemon_connection_and_temp_keychain async def assert_add_private_key_with_label( - key_data: KeyData, request: Dict[str, object], add_private_key_response: Dict[str, object] + key_data: KeyData, request: dict[str, object], add_private_key_response: dict[str, object] ) -> None: await ws.send_str(create_payload("add_private_key", request, "test", "daemon")) assert_response(await ws.receive(), add_private_key_response) @@ -1282,7 +1282,7 @@ async def test_key_label_deletion(daemon_connection_and_temp_keychain): ) @pytest.mark.anyio async def test_key_label_methods( - daemon_connection_and_temp_keychain, method: str, parameter: Dict[str, Any], response_data_dict: Dict[str, Any] + daemon_connection_and_temp_keychain, method: str, parameter: dict[str, Any], response_data_dict: dict[str, Any] ) -> None: ws, keychain = 
daemon_connection_and_temp_keychain keychain.add_key(test_key_data.mnemonic_str(), "key_0") @@ -1291,7 +1291,7 @@ async def test_key_label_methods( @pytest.mark.anyio -async def test_bad_json(daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain]) -> None: +async def test_bad_json(daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain]) -> None: ws, _ = daemon_connection_and_temp_keychain await ws.send_str("{doo: '12'}") # send some bad json @@ -1390,7 +1390,7 @@ async def test_bad_json(daemon_connection_and_temp_keychain: Tuple[aiohttp.Clien ) @pytest.mark.anyio async def test_misc_daemon_ws( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase, ) -> None: ws, _ = daemon_connection_and_temp_keychain @@ -1404,7 +1404,7 @@ async def test_misc_daemon_ws( @pytest.mark.anyio async def test_unexpected_json( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain] + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain] ) -> None: ws, _ = daemon_connection_and_temp_keychain @@ -1424,7 +1424,7 @@ async def test_unexpected_json( ) @pytest.mark.anyio async def test_commands_with_no_data( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], command_to_test: str + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], command_to_test: str ) -> None: ws, _ = daemon_connection_and_temp_keychain @@ -1468,7 +1468,7 @@ async def test_commands_with_no_data( ) @pytest.mark.anyio async def test_set_keyring_passphrase_ws( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase, ) -> None: ws, _ = daemon_connection_and_temp_keychain @@ -1571,7 +1571,7 @@ async def test_set_keyring_passphrase_ws( ) @pytest.mark.anyio async def test_passphrase_apis( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase, ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -1652,7 +1652,7 @@ async def test_passphrase_apis( ) @pytest.mark.anyio async def test_keychain_status_messages( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteStatusCase, ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -1700,7 +1700,7 @@ async def test_keychain_status_messages( ) @pytest.mark.anyio async def test_keyring_file_deleted( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase, ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -1758,7 +1758,7 @@ async def test_keyring_file_deleted( ) @pytest.mark.anyio async def test_plotter_errors( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], case: RouteCase ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -1871,7 +1871,7 @@ async def 
test_plotter_errors( ) @pytest.mark.anyio async def test_plotter_options( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], get_b_tools: BlockTools, case: RouteCase, ) -> None: @@ -1937,7 +1937,7 @@ def check_plot_queue_log( @pytest.mark.anyio async def test_plotter_roundtrip( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], get_b_tools: BlockTools + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], get_b_tools: BlockTools ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -1951,7 +1951,7 @@ async def test_plotter_roundtrip( root_path = get_b_tools.root_path - plotting_request: Dict[str, Any] = { + plotting_request: dict[str, Any] = { **plotter_request_ref, "d": str(root_path), "t": str(root_path), @@ -2009,7 +2009,7 @@ async def test_plotter_roundtrip( @pytest.mark.anyio async def test_plotter_stop_plotting( - daemon_connection_and_temp_keychain: Tuple[aiohttp.ClientWebSocketResponse, Keychain], get_b_tools: BlockTools + daemon_connection_and_temp_keychain: tuple[aiohttp.ClientWebSocketResponse, Keychain], get_b_tools: BlockTools ) -> None: ws, keychain = daemon_connection_and_temp_keychain @@ -2023,7 +2023,7 @@ async def test_plotter_stop_plotting( root_path = get_b_tools.root_path - plotting_request: Dict[str, Any] = { + plotting_request: dict[str, Any] = { **plotter_request_ref, "d": str(root_path), "t": str(root_path), @@ -2066,7 +2066,7 @@ async def test_plotter_stop_plotting( payload = dict_to_json_str(payload_rpc) await ws.send_str(payload) - responses: List[WSMessage] = [] + responses: list[WSMessage] = [] # 3, 4, and 5) # Removing @@ -2075,8 +2075,8 @@ async def test_plotter_stop_plotting( for _ in range(3): responses.append(await ws.receive()) - state_changes: List[WSMessage] = [] - finished: List[WSMessage] = [] + state_changes: list[WSMessage] = [] + finished: list[WSMessage] = [] for response in responses: message = json.loads(response.data.strip()) @@ -2113,7 +2113,7 @@ def test_run_plotter_bladebit( def bladebit_exists(x: Path) -> bool: return True if isinstance(x, Path) and x.parent == root_path / "plotters" else mocker.DEFAULT - def get_bladebit_version(_: Path) -> Tuple[bool, List[str]]: + def get_bladebit_version(_: Path) -> tuple[bool, list[str]]: return True, ["3", "0", "0"] mocker.patch("os.path.exists", side_effect=bladebit_exists) diff --git a/chia/_tests/core/daemon/test_keychain_proxy.py b/chia/_tests/core/daemon/test_keychain_proxy.py index c65d0b69f397..a07c45f99fb1 100644 --- a/chia/_tests/core/daemon/test_keychain_proxy.py +++ b/chia/_tests/core/daemon/test_keychain_proxy.py @@ -1,8 +1,9 @@ from __future__ import annotations import logging +from collections.abc import AsyncGenerator from dataclasses import replace -from typing import Any, AsyncGenerator +from typing import Any import pytest diff --git a/chia/_tests/core/data_layer/conftest.py b/chia/_tests/core/data_layer/conftest.py index 32c218129254..822ab6fa5e40 100644 --- a/chia/_tests/core/data_layer/conftest.py +++ b/chia/_tests/core/data_layer/conftest.py @@ -4,7 +4,8 @@ import pathlib import sys import time -from typing import Any, AsyncIterable, Awaitable, Callable, Dict, Iterator +from collections.abc import AsyncIterable, Awaitable, Iterator +from typing import Any, Callable import pytest @@ -84,7 +85,7 @@ async def valid_node_values_fixture( data_store: DataStore, store_id: 
bytes32, node_type: NodeType, -) -> Dict[str, Any]: +) -> dict[str, Any]: await add_01234567_example(data_store=data_store, store_id=store_id) if node_type == NodeType.INTERNAL: @@ -98,7 +99,7 @@ async def valid_node_values_fixture( @pytest.fixture(name="bad_node_type", params=range(2 * len(NodeType))) -def bad_node_type_fixture(request: SubRequest, valid_node_values: Dict[str, Any]) -> int: +def bad_node_type_fixture(request: SubRequest, valid_node_values: dict[str, Any]) -> int: if request.param == valid_node_values["node_type"]: pytest.skip("Actually, this is a valid node type") diff --git a/chia/_tests/core/data_layer/test_data_cli.py b/chia/_tests/core/data_layer/test_data_cli.py index 778a795aeae1..616e3f10d515 100644 --- a/chia/_tests/core/data_layer/test_data_cli.py +++ b/chia/_tests/core/data_layer/test_data_cli.py @@ -1,7 +1,6 @@ from __future__ import annotations import json -from typing import Dict, List import pytest @@ -32,7 +31,7 @@ def test_round_trip(chia_root: ChiaRoot, chia_daemon: None, chia_data: None) -> store_id = dic["id"] key = "1a6f915513173902a7216e7d9e4a16bfd088e20683f45de3b432ce72e9cc7aa8" value = "ffff8353594d8083616263" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key, "value": value}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key, "value": value}] print(json.dumps(changelist)) update = chia_root.run( args=["data", "update_data_store", "--id", store_id, "--changelist", json.dumps(changelist)] diff --git a/chia/_tests/core/data_layer/test_data_layer.py b/chia/_tests/core/data_layer/test_data_layer.py index 04d5f1b6a11d..71fa0786ef45 100644 --- a/chia/_tests/core/data_layer/test_data_layer.py +++ b/chia/_tests/core/data_layer/test_data_layer.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, cast +from typing import Any, cast import pytest @@ -26,7 +26,7 @@ async def await_closed(self) -> None: @pytest.mark.parametrize(argnames="enable", argvalues=[True, False], ids=["log", "do not log"]) @pytest.mark.anyio -async def test_sql_logs(enable: bool, config: Dict[str, Any], tmp_chia_root: Path) -> None: +async def test_sql_logs(enable: bool, config: dict[str, Any], tmp_chia_root: Path) -> None: config["data_layer"]["log_sqlite_cmds"] = enable log_path = tmp_chia_root.joinpath("log", "data_sql.log") @@ -51,7 +51,7 @@ async def test_sql_logs(enable: bool, config: Dict[str, Any], tmp_chia_root: Pat @pytest.mark.anyio async def test_plugin_requests_use_custom_headers( recording_web_server: RecordingWebServer, - config: Dict[str, Any], + config: dict[str, Any], tmp_chia_root: Path, ) -> None: header_key = "vbiuoqemnrlah" diff --git a/chia/_tests/core/data_layer/test_data_layer_util.py b/chia/_tests/core/data_layer/test_data_layer_util.py index 8e43dfec9faf..8ef483dfec1d 100644 --- a/chia/_tests/core/data_layer/test_data_layer_util.py +++ b/chia/_tests/core/data_layer/test_data_layer_util.py @@ -2,7 +2,6 @@ import dataclasses from random import Random -from typing import List, Tuple import pytest @@ -32,7 +31,7 @@ def create_valid_proof_of_inclusion(layer_count: int, other_hash_side: Side) -> ProofOfInclusion: node_hash = bytes32(b"a" * 32) - layers: List[ProofOfInclusionLayer] = [] + layers: list[ProofOfInclusionLayer] = [] existing_hash = node_hash @@ -143,7 +142,7 @@ def test_internal_hash(seeded_random: Random) -> None: def definition(left_hash: bytes32, right_hash: bytes32) -> bytes32: return Program.to((left_hash, right_hash)).get_tree_hash_precalc(left_hash, 
right_hash) - data: List[Tuple[bytes32, bytes32, bytes32]] = [] + data: list[tuple[bytes32, bytes32, bytes32]] = [] for _ in range(5000): left_hash = bytes32.random(r=seeded_random) right_hash = bytes32.random(r=seeded_random) @@ -170,7 +169,7 @@ def test_leaf_hash(seeded_random: Random) -> None: def definition(key: bytes, value: bytes) -> bytes32: return SerializedProgram.to((key, value)).get_tree_hash() - data: List[Tuple[bytes, bytes, bytes32]] = [] + data: list[tuple[bytes, bytes, bytes32]] = [] for cycle in range(20000): if cycle in (0, 1): length = 0 @@ -200,7 +199,7 @@ def test_key_hash(seeded_random: Random) -> None: def definition(key: bytes) -> bytes32: return SerializedProgram.to(key).get_tree_hash() - data: List[Tuple[bytes, bytes32]] = [] + data: list[tuple[bytes, bytes32]] = [] for cycle in range(30000): if cycle == 0: length = 0 diff --git a/chia/_tests/core/data_layer/test_data_rpc.py b/chia/_tests/core/data_layer/test_data_rpc.py index 622e441b8f56..329c65c99293 100644 --- a/chia/_tests/core/data_layer/test_data_rpc.py +++ b/chia/_tests/core/data_layer/test_data_rpc.py @@ -11,11 +11,12 @@ import sqlite3 import sys import time +from collections.abc import AsyncIterator from copy import deepcopy from dataclasses import dataclass from enum import IntEnum from pathlib import Path -from typing import Any, AsyncIterator, Dict, List, Optional, Set, Tuple, cast +from typing import Any, Optional, cast import anyio import pytest @@ -73,10 +74,10 @@ from chia.wallet.wallet_node import WalletNode pytestmark = pytest.mark.data_layer -nodes = Tuple[WalletNode, FullNodeSimulator] -nodes_with_port_bt_ph = Tuple[WalletRpcApi, FullNodeSimulator, uint16, bytes32, BlockTools] -wallet_and_port_tuple = Tuple[WalletNode, uint16] -two_wallets_with_port = Tuple[Tuple[wallet_and_port_tuple, wallet_and_port_tuple], FullNodeSimulator, BlockTools] +nodes = tuple[WalletNode, FullNodeSimulator] +nodes_with_port_bt_ph = tuple[WalletRpcApi, FullNodeSimulator, uint16, bytes32, BlockTools] +wallet_and_port_tuple = tuple[WalletNode, uint16] +two_wallets_with_port = tuple[tuple[wallet_and_port_tuple, wallet_and_port_tuple], FullNodeSimulator, BlockTools] class InterfaceLayer(enum.Enum): @@ -261,7 +262,7 @@ async def test_create_insert_get( data_rpc_api = DataLayerRpcApi(data_layer) key = b"a" value = b"\x00\x01" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] res = await data_rpc_api.create_data_store({}) assert res is not None store_id = bytes32.from_hexstr(res["id"]) @@ -340,7 +341,7 @@ async def test_upsert( data_rpc_api = DataLayerRpcApi(data_layer) key = b"a" value = b"\x00\x01" - changelist: List[Dict[str, str]] = [ + changelist: list[dict[str, str]] = [ {"action": "delete", "key": key.hex()}, {"action": "insert", "key": key.hex(), "value": value.hex()}, ] @@ -373,7 +374,7 @@ async def test_create_double_insert( await farm_block_check_singleton(data_layer, full_node_api, ph, store_id, wallet=wallet_rpc_api.service) key1 = b"a" value1 = b"\x01\x02" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) @@ -411,7 
+412,7 @@ async def test_keys_values_ancestors( await farm_block_check_singleton(data_layer, full_node_api, ph, store_id, wallet=wallet_rpc_api.service) key1 = b"a" value1 = b"\x01\x02" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] key2 = b"b" value2 = b"\x03\x02" changelist.append({"action": "insert", "key": key2.hex(), "value": value2.hex()}) @@ -492,7 +493,7 @@ async def test_get_roots( key1 = b"a" value1 = b"\x01\x02" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] key2 = b"b" value2 = b"\x03\x02" changelist.append({"action": "insert", "key": key2.hex(), "value": value2.hex()}) @@ -539,7 +540,7 @@ async def test_get_root_history( await farm_block_check_singleton(data_layer, full_node_api, ph, store_id1, wallet=wallet_rpc_api.service) key1 = b"a" value1 = b"\x01\x02" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] key2 = b"b" value2 = b"\x03\x02" changelist.append({"action": "insert", "key": key2.hex(), "value": value2.hex()}) @@ -593,7 +594,7 @@ async def test_get_kv_diff( await farm_block_check_singleton(data_layer, full_node_api, ph, store_id1, wallet=wallet_rpc_api.service) key1 = b"a" value1 = b"\x01\x02" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] key2 = b"b" value2 = b"\x03\x02" changelist.append({"action": "insert", "key": key2.hex(), "value": value2.hex()}) @@ -661,7 +662,7 @@ async def test_batch_update_matches_single_operations( key = b"a" value = b"\x00\x01" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) @@ -824,7 +825,7 @@ async def offer_setup_fixture( [full_node_service], wallet_services, bt = two_wallet_nodes_services enable_batch_autoinsertion_settings = getattr(request, "param", (True, True)) full_node_api = full_node_service._api - wallets: List[Wallet] = [] + wallets: list[Wallet] = [] for wallet_service in wallet_services: wallet_node = wallet_service._node assert wallet_node.server is not None @@ -836,7 +837,7 @@ async def offer_setup_fixture( await full_node_api.farm_blocks_to_wallet(count=1, wallet=wallet, timeout=60) async with contextlib.AsyncExitStack() as exit_stack: - store_setups: List[StoreSetup] = [] + store_setups: list[StoreSetup] = [] for enable_batch_autoinsert, wallet_service in zip(enable_batch_autoinsertion_settings, wallet_services): assert wallet_service.rpc_server is not None port = wallet_service.rpc_server.listen_port @@ -928,7 +929,7 @@ async def offer_setup_fixture( async def populate_offer_setup(offer_setup: OfferSetup, count: int) -> OfferSetup: if count > 0: - setups: Tuple[Tuple[StoreSetup, bytes], Tuple[StoreSetup, bytes]] = ( + setups: tuple[tuple[StoreSetup, bytes], 
tuple[StoreSetup, bytes]] = ( (offer_setup.maker, b"\x01"), (offer_setup.taker, b"\x02"), ) @@ -1014,11 +1015,11 @@ async def process_for_data_layer_keys( @dataclass(frozen=True) class MakeAndTakeReference: entries_to_insert: int - make_offer_response: Dict[str, Any] - maker_inclusions: List[Dict[str, Any]] - maker_root_history: List[bytes32] - taker_inclusions: List[Dict[str, Any]] - taker_root_history: List[bytes32] + make_offer_response: dict[str, Any] + maker_inclusions: list[dict[str, Any]] + maker_root_history: list[bytes32] + taker_inclusions: list[dict[str, Any]] + taker_root_history: list[bytes32] trade_id: str @@ -2004,7 +2005,7 @@ async def test_get_sync_status( key = b"a" value = b"\x00\x01" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) @@ -2093,7 +2094,7 @@ async def test_clear_pending_roots( root_path=bt.root_path, ) elif layer == InterfaceLayer.cli: - args: List[str] = [ + args: list[str] = [ sys.executable, "-m", "chia", @@ -2223,7 +2224,7 @@ async def test_maximum_full_file_count( ) as data_layer: data_rpc_api = DataLayerRpcApi(data_layer) res = await data_rpc_api.create_data_store({}) - root_hashes: List[bytes32] = [] + root_hashes: list[bytes32] = [] assert res is not None store_id = bytes32.from_hexstr(res["id"]) await farm_block_check_singleton(data_layer, full_node_api, ph, store_id, wallet=wallet_rpc_api.service) @@ -2304,7 +2305,7 @@ async def test_unsubscribe_removes_files( ) as data_layer: data_rpc_api = DataLayerRpcApi(data_layer) res = await data_rpc_api.create_data_store({}) - root_hashes: List[bytes32] = [] + root_hashes: list[bytes32] = [] assert res is not None store_id = bytes32.from_hexstr(res["id"]) await farm_block_check_singleton(data_layer, full_node_api, ph, store_id, wallet=wallet_rpc_api.service) @@ -2462,8 +2463,8 @@ async def test_mirrors( @dataclass(frozen=True) class ProofReference: entries_to_insert: int - keys_to_prove: List[str] - verify_proof_response: Dict[str, Any] + keys_to_prove: list[str] + verify_proof_response: dict[str, Any] def populate_reference(count: int, keys_to_prove: int) -> ProofReference: @@ -2612,7 +2613,7 @@ async def test_dl_proof(offer_setup: OfferSetup, reference: ProofReference) -> N assert verify == reference.verify_proof_response # test InterfaceLayer.cli - key_args: List[str] = [] + key_args: list[str] = [] for key in reference.keys_to_prove: key_args.append("--key") key_args.append(key) @@ -2669,7 +2670,7 @@ async def test_dl_proof_errors( with pytest.raises(Exception, match="No generations found"): await data_rpc_api.get_proof(request={"store_id": store_id.hex(), "keys": [b"4".hex()]}) - changelist: List[Dict[str, str]] = [{"action": "insert", "key": b"a".hex(), "value": b"\x00\x01".hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": b"a".hex(), "value": b"\x00\x01".hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) @@ -2741,7 +2742,7 @@ async def test_dl_proof_changed_root(offer_setup: OfferSetup, seeded_random: ran key = b"a" value = b"\x00\x01" - changelist: List[Dict[str, str]] = [{"action": 
"insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] await offer_setup.maker.api.batch_update({"id": offer_setup.maker.id.hex(), "changelist": changelist}) await process_for_data_layer_keys( @@ -2775,7 +2776,7 @@ async def test_pagination_rpcs( value1 = b"\x01\x02" key1_hash = key_hash(key1) leaf_hash1 = leaf_hash(key1, value1) - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key1.hex(), "value": value1.hex()}] key2 = b"ba" value2 = b"\x03\x02" key2_hash = key_hash(key2) @@ -3087,7 +3088,7 @@ async def test_pagination_cmds( elif layer == InterfaceLayer.cli: for command in ("get_keys", "get_keys_values", "get_kv_diff"): if command == "get_keys" or command == "get_keys_values": - args: List[str] = [ + args: list[str] = [ sys.executable, "-m", "chia", @@ -3276,7 +3277,7 @@ async def test_unsubmitted_batch_update( to_insert = [(b"a", b"\x00\x01"), (b"b", b"\x00\x02"), (b"c", b"\x00\x03")] for key, value in to_insert: - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] if layer == InterfaceLayer.direct: res = await data_rpc_api.batch_update( @@ -3295,7 +3296,7 @@ async def test_unsubmitted_batch_update( ) assert res == {"success": True} elif layer == InterfaceLayer.cli: - args: List[str] = [ + args: list[str] = [ sys.executable, "-m", "chia", @@ -3535,7 +3536,7 @@ async def test_multistore_update( data_store = data_layer.data_store data_rpc_api = DataLayerRpcApi(data_layer) - store_ids: List[bytes32] = [] + store_ids: list[bytes32] = [] store_ids_count = 5 for _ in range(store_ids_count): @@ -3545,10 +3546,10 @@ async def test_multistore_update( await farm_block_check_singleton(data_layer, full_node_api, ph, store_id, wallet=wallet_rpc_api.service) store_ids.append(store_id) - store_updates: List[Dict[str, Any]] = [] + store_updates: list[dict[str, Any]] = [] key_offset = 1000 for index, store_id in enumerate(store_ids): - changelist: List[Dict[str, str]] = [] + changelist: list[dict[str, str]] = [] key = index.to_bytes(2, "big") value = index.to_bytes(2, "big") changelist.append({"action": "insert", "key": key.hex(), "value": value.hex()}) @@ -3734,7 +3735,7 @@ class ModifiedStatus(IntEnum): key = b"0000" value = b"0000" - changelist: List[Dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] + changelist: list[dict[str, str]] = [{"action": "insert", "key": key.hex(), "value": value.hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) @@ -3794,10 +3795,10 @@ async def test_auto_subscribe_to_local_stores( manage_data_interval = 5 fake_store = bytes32([1] * 32) - async def mock_get_store_ids(self: Any) -> Set[bytes32]: + async def mock_get_store_ids(self: Any) -> set[bytes32]: return {fake_store} - async def mock_dl_track_new(self: Any, request: Dict[str, Any]) -> Dict[str, Any]: + async def mock_dl_track_new(self: Any, request: dict[str, Any]) -> dict[str, Any]: # ignore and just return empty response return {} @@ -3843,7 +3844,7 @@ async def test_local_store_exception( manage_data_interval = 5 fake_store = bytes32([1] * 32) - async def mock_get_store_ids(self: 
Any) -> Set[bytes32]: + async def mock_get_store_ids(self: Any) -> set[bytes32]: return {fake_store} with monkeypatch.context() as m, caplog.at_level(logging.INFO): diff --git a/chia/_tests/core/data_layer/test_data_store.py b/chia/_tests/core/data_layer/test_data_store.py index 1c21a6ed9e1d..af0b5cd45b4a 100644 --- a/chia/_tests/core/data_layer/test_data_store.py +++ b/chia/_tests/core/data_layer/test_data_store.py @@ -7,10 +7,11 @@ import re import statistics import time +from collections.abc import Awaitable from dataclasses import dataclass from pathlib import Path from random import Random -from typing import Any, Awaitable, Callable, Dict, List, Optional, Set, Tuple, cast +from typing import Any, Callable, Optional, cast import aiohttp import aiosqlite @@ -55,7 +56,7 @@ pytestmark = pytest.mark.data_layer -table_columns: Dict[str, List[str]] = { +table_columns: dict[str, list[str]] = { "node": ["hash", "node_type", "left", "right", "key", "value"], "root": ["tree_id", "generation", "node_hash", "status"], } @@ -66,7 +67,7 @@ @pytest.mark.anyio -async def test_valid_node_values_fixture_are_valid(data_store: DataStore, valid_node_values: Dict[str, Any]) -> None: +async def test_valid_node_values_fixture_are_valid(data_store: DataStore, valid_node_values: dict[str, Any]) -> None: async with data_store.db_wrapper.writer() as writer: await writer.execute( """ @@ -80,7 +81,7 @@ async def test_valid_node_values_fixture_are_valid(data_store: DataStore, valid_ @pytest.mark.parametrize(argnames=["table_name", "expected_columns"], argvalues=table_columns.items()) @pytest.mark.anyio async def test_create_creates_tables_and_columns( - database_uri: str, table_name: str, expected_columns: List[str] + database_uri: str, table_name: str, expected_columns: list[str] ) -> None: # Never string-interpolate sql queries... Except maybe in tests when it does not # allow you to parametrize the query. 
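
A note on the comment above: SQLite can bind values through ? placeholders but cannot bind identifiers such as table or column names, which is why a test parametrized over table names has no choice but to interpolate them into the query text. A minimal sketch of the safe split between the two, using a hypothetical allow-list and helper that are not part of this patch:

    import sqlite3

    # Hypothetical allow-list for this sketch; the real tests parametrize
    # over the schema's own table names.
    ALLOWED_TABLES = {"node", "root"}

    def count_rows(conn: sqlite3.Connection, table_name: str, node_type: int) -> int:
        # Identifiers cannot be bound as parameters, so validate them
        # before interpolating into the query text.
        if table_name not in ALLOWED_TABLES:
            raise ValueError(f"unexpected table: {table_name}")
        # Values, by contrast, always go through placeholders.
        cursor = conn.execute(
            f"SELECT COUNT(*) FROM {table_name} WHERE node_type = ?",
            (node_type,),
        )
        (count,) = cursor.fetchone()
        return count
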
@@ -298,7 +299,7 @@ async def test_get_ancestors(data_store: DataStore, store_id: bytes32) -> None: @pytest.mark.anyio async def test_get_ancestors_optimized(data_store: DataStore, store_id: bytes32) -> None: - ancestors: List[Tuple[int, bytes32, List[InternalNode]]] = [] + ancestors: list[tuple[int, bytes32, list[InternalNode]]] = [] random = Random() random.seed(100, version=2) @@ -385,16 +386,16 @@ async def test_batch_update( ) -> None: total_operations = 1000 if use_optimized else 100 num_ops_per_batch = total_operations // num_batches - saved_batches: List[List[Dict[str, Any]]] = [] - saved_kv: List[List[TerminalNode]] = [] + saved_batches: list[list[dict[str, Any]]] = [] + saved_kv: list[list[TerminalNode]] = [] db_uri = generate_in_memory_db_uri() async with DataStore.managed(database=db_uri, uri=True) as single_op_data_store: await single_op_data_store.create_tree(store_id, status=Status.COMMITTED) random = Random() random.seed(100, version=2) - batch: List[Dict[str, Any]] = [] - keys_values: Dict[bytes, bytes] = {} + batch: list[dict[str, Any]] = [] + keys_values: dict[bytes, bytes] = {} for operation in range(num_batches * num_ops_per_batch): [op_type] = random.choices( ["insert", "upsert-insert", "upsert-update", "delete"], @@ -468,8 +469,8 @@ async def test_batch_update( assert {node.key: node.value for node in current_kv} == { node.key: node.value for node in saved_kv[batch_number] } - queue: List[bytes32] = [root.node_hash] - ancestors: Dict[bytes32, bytes32] = {} + queue: list[bytes32] = [root.node_hash] + ancestors: dict[bytes32, bytes32] = {} while len(queue) > 0: node_hash = queue.pop(0) expected_ancestors = [] @@ -1124,7 +1125,7 @@ async def test_kv_diff(data_store: DataStore, store_id: bytes32) -> None: random = Random() random.seed(100, version=2) insertions = 0 - expected_diff: Set[DiffData] = set() + expected_diff: set[DiffData] = set() root_start = None for i in range(500): key = (i + 100).to_bytes(4, byteorder="big") @@ -1422,7 +1423,7 @@ async def test_data_server_files( group_files_by_store: bool, tmp_path: Path, ) -> None: - roots: List[Root] = [] + roots: list[Root] = [] num_batches = 10 num_ops_per_batch = 100 @@ -1432,11 +1433,11 @@ async def test_data_server_files( random = Random() random.seed(100, version=2) - keys: List[bytes] = [] + keys: list[bytes] = [] counter = 0 for batch in range(num_batches): - changelist: List[Dict[str, Any]] = [] + changelist: list[dict[str, Any]] = [] for operation in range(num_ops_per_batch): if random.randint(0, 4) > 0 or len(keys) == 0: key = counter.to_bytes(4, byteorder="big") @@ -2156,7 +2157,7 @@ async def test_update_keys(data_store: DataStore, store_id: bytes32, use_upsert: num_values = 10 new_keys = 10 for value in range(num_values): - changelist: List[Dict[str, Any]] = [] + changelist: list[dict[str, Any]] = [] bytes_value = value.to_bytes(4, byteorder="big") if use_upsert: for key in range(num_keys): @@ -2201,10 +2202,10 @@ async def test_migration_unknown_version(data_store: DataStore) -> None: async def _check_ancestors( data_store: DataStore, store_id: bytes32, root_hash: bytes32 -) -> Dict[bytes32, Optional[bytes32]]: - ancestors: Dict[bytes32, Optional[bytes32]] = {} +) -> dict[bytes32, Optional[bytes32]]: + ancestors: dict[bytes32, Optional[bytes32]] = {} root_node: Node = await data_store.get_node(root_hash) - queue: List[Node] = [root_node] + queue: list[Node] = [root_node] while queue: node = queue.pop(0) @@ -2231,7 +2232,7 @@ async def _check_ancestors( @pytest.mark.anyio async def 
test_build_ancestor_table(data_store: DataStore, store_id: bytes32) -> None: num_values = 1000 - changelist: List[Dict[str, Any]] = [] + changelist: list[dict[str, Any]] = [] for value in range(num_values): value_bytes = value.to_bytes(4, byteorder="big") changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes}) @@ -2295,12 +2296,12 @@ async def test_sparse_ancestor_table(data_store: DataStore, store_id: bytes32) - assert previous_generation_count == 184 -async def get_all_nodes(data_store: DataStore, store_id: bytes32) -> List[Node]: +async def get_all_nodes(data_store: DataStore, store_id: bytes32) -> list[Node]: root = await data_store.get_tree_root(store_id) assert root.node_hash is not None root_node = await data_store.get_node(root.node_hash) - nodes: List[Node] = [] - queue: List[Node] = [root_node] + nodes: list[Node] = [] + queue: list[Node] = [root_node] while len(queue) > 0: node = queue.pop(0) @@ -2317,7 +2318,7 @@ async def get_all_nodes(data_store: DataStore, store_id: bytes32) -> List[Node]: @pytest.mark.anyio async def test_get_nodes(data_store: DataStore, store_id: bytes32) -> None: num_values = 50 - changelist: List[Dict[str, Any]] = [] + changelist: list[dict[str, Any]] = [] for value in range(num_values): value_bytes = value.to_bytes(4, byteorder="big") @@ -2349,11 +2350,11 @@ async def test_get_leaf_at_minimum_height( ) -> None: num_values = 1000 value_offset = 1000000 - all_min_leafs: Set[TerminalNode] = set() + all_min_leafs: set[TerminalNode] = set() if pre > 0: # This builds a complete binary tree, in order to test more than one batch in the queue before finding the leaf - changelist: List[Dict[str, Any]] = [] + changelist: list[dict[str, Any]] = [] for value in range(pre): value_bytes = (value * value).to_bytes(8, byteorder="big") @@ -2375,12 +2376,12 @@ async def test_get_leaf_at_minimum_height( ) if (value + 1) % batch_size == 0: - hash_to_parent: Dict[bytes32, InternalNode] = {} + hash_to_parent: dict[bytes32, InternalNode] = {} root = await data_store.get_tree_root(store_id) assert root.node_hash is not None min_leaf = await data_store.get_leaf_at_minimum_height(root.node_hash, hash_to_parent) all_nodes = await get_all_nodes(data_store, store_id) - heights: Dict[bytes32, int] = {} + heights: dict[bytes32, int] = {} heights[root.node_hash] = 0 min_leaf_height = None diff --git a/chia/_tests/core/data_layer/test_data_store_schema.py b/chia/_tests/core/data_layer/test_data_store_schema.py index 46a474f2f095..72c6500344d0 100644 --- a/chia/_tests/core/data_layer/test_data_store_schema.py +++ b/chia/_tests/core/data_layer/test_data_store_schema.py @@ -1,7 +1,7 @@ from __future__ import annotations import sqlite3 -from typing import Any, Dict +from typing import Any import pytest @@ -35,7 +35,7 @@ async def test_node_hash_must_be_32( data_store: DataStore, store_id: bytes32, length: int, - valid_node_values: Dict[str, Any], + valid_node_values: dict[str, Any], ) -> None: valid_node_values["hash"] = bytes([0] * length) @@ -54,7 +54,7 @@ async def test_node_hash_must_be_32( async def test_node_hash_must_not_be_null( data_store: DataStore, store_id: bytes32, - valid_node_values: Dict[str, Any], + valid_node_values: dict[str, Any], ) -> None: valid_node_values["hash"] = None @@ -74,7 +74,7 @@ async def test_node_type_must_be_valid( data_store: DataStore, node_type: NodeType, bad_node_type: int, - valid_node_values: Dict[str, Any], + valid_node_values: dict[str, Any], ) -> None: valid_node_values["node_type"] = bad_node_type diff --git 
a/chia/_tests/core/data_layer/util.py b/chia/_tests/core/data_layer/util.py index 84cbdd2d2551..406309f65a24 100644 --- a/chia/_tests/core/data_layer/util.py +++ b/chia/_tests/core/data_layer/util.py @@ -6,8 +6,9 @@ import pathlib import shutil import subprocess +from collections.abc import Iterator from dataclasses import dataclass -from typing import IO, TYPE_CHECKING, Any, Dict, Iterator, List, Literal, Optional, Union, overload +from typing import IO, TYPE_CHECKING, Any, Literal, Optional, Union, overload from chia.data_layer.data_layer_util import InternalNode, Node, NodeType, Side, Status, TerminalNode from chia.data_layer.data_store import DataStore @@ -49,7 +50,7 @@ async def general_insert( @dataclass(frozen=True) class Example: expected: Node - terminal_nodes: List[bytes32] + terminal_nodes: list[bytes32] async def add_0123_example(data_store: DataStore, store_id: bytes32) -> Example: @@ -121,7 +122,7 @@ class ChiaRoot: def run( self, - args: List[Union[str, os_PathLike_str]], + args: list[Union[str, os_PathLike_str]], *other_args: Any, check: bool = True, encoding: str = "utf-8", @@ -141,13 +142,13 @@ def run( chia_executable = shutil.which("chia") if chia_executable is None: chia_executable = "chia" - modified_args: List[Union[str, os_PathLike_str]] = [ + modified_args: list[Union[str, os_PathLike_str]] = [ self.scripts_path.joinpath(chia_executable), "--root-path", self.path, *args, ] - processed_args: List[str] = [os.fspath(element) for element in modified_args] + processed_args: list[str] = [os.fspath(element) for element in modified_args] final_args = [processed_args, *other_args] kwargs["check"] = check @@ -191,20 +192,20 @@ def create_valid_node_values( node_type: Literal[NodeType.INTERNAL], left_hash: bytes32, right_hash: bytes32, -) -> Dict[str, Any]: ... +) -> dict[str, Any]: ... @overload def create_valid_node_values( node_type: Literal[NodeType.TERMINAL], -) -> Dict[str, Any]: ... +) -> dict[str, Any]: ... 
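
The @overload stubs above work together with the implementation that follows: the Literal[NodeType.INTERNAL] signature requires both child hashes, the Literal[NodeType.TERMINAL] signature forbids them, and the single runtime body accepts the union. A self-contained sketch of the same idiom, with illustrative names that are not from this codebase:

    from typing import Literal, Optional, overload

    @overload
    def describe(kind: Literal["internal"], left: str, right: str) -> dict[str, str]: ...

    @overload
    def describe(kind: Literal["terminal"]) -> dict[str, str]: ...

    def describe(
        kind: Literal["internal", "terminal"],
        left: Optional[str] = None,
        right: Optional[str] = None,
    ) -> dict[str, str]:
        # Only the @overload signatures are visible to the type checker;
        # this runtime body accepts their union and narrows by kind.
        if kind == "internal":
            assert left is not None and right is not None
            return {"kind": kind, "left": left, "right": right}
        return {"kind": kind}
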
def create_valid_node_values( node_type: NodeType, left_hash: Optional[bytes32] = None, right_hash: Optional[bytes32] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: if node_type == NodeType.INTERNAL: assert left_hash is not None assert right_hash is not None diff --git a/chia/_tests/core/farmer/test_farmer_api.py b/chia/_tests/core/farmer/test_farmer_api.py index 2dc8b2186440..d05defe5fc36 100644 --- a/chia/_tests/core/farmer/test_farmer_api.py +++ b/chia/_tests/core/farmer/test_farmer_api.py @@ -1,7 +1,8 @@ from __future__ import annotations from asyncio import Task, create_task, gather, sleep -from typing import Any, Coroutine, Optional, TypeVar +from collections.abc import Coroutine +from typing import Any, Optional, TypeVar import pytest diff --git a/chia/_tests/core/full_node/full_sync/test_full_sync.py b/chia/_tests/core/full_node/full_sync/test_full_sync.py index d0a2ec99b3c2..dff1924d47a3 100644 --- a/chia/_tests/core/full_node/full_sync/test_full_sync.py +++ b/chia/_tests/core/full_node/full_sync/test_full_sync.py @@ -5,7 +5,6 @@ import dataclasses import logging import time -from typing import List import pytest @@ -31,7 +30,7 @@ class TestFullSync: async def test_long_sync_from_zero(self, five_nodes, default_400_blocks, bt, self_hostname): # Must be larger than "sync_block_behind_threshold" in the config num_blocks = len(default_400_blocks) - blocks: List[FullBlock] = default_400_blocks + blocks: list[FullBlock] = default_400_blocks full_node_1, full_node_2, full_node_3, full_node_4, full_node_5 = five_nodes server_1 = full_node_1.full_node.server server_2 = full_node_2.full_node.server @@ -345,7 +344,7 @@ async def async_mock(): assert peak1 is not None summary_heights = full_node_1.full_node.blockchain.get_ses_heights() - summaries: List[SubEpochSummary] = [] + summaries: list[SubEpochSummary] = [] # get ses list for sub_epoch_n, ses_height in enumerate(summary_heights): @@ -395,7 +394,7 @@ async def test_sync_none_wp_response_backward_comp(self, three_nodes, default_10 await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), full_node_2.full_node.on_connect) await server_3.start_client(PeerInfo(self_hostname, server_1.get_port()), full_node_3.full_node.on_connect) - peers: List = [c for c in full_node_2.full_node.server.all_connections.values()] + peers: list = [c for c in full_node_2.full_node.server.all_connections.values()] request = full_node_protocol.RequestProofOfWeight( blocks_950[-1].height + 1, default_1000_blocks[-1].header_hash ) @@ -406,7 +405,7 @@ async def test_sync_none_wp_response_backward_comp(self, three_nodes, default_10 log.info(f"result was {res}") assert duration < 1 - peers: List = [c for c in full_node_3.full_node.server.all_connections.values()] + peers: list = [c for c in full_node_3.full_node.server.all_connections.values()] request = full_node_protocol.RequestProofOfWeight( blocks_950[-1].height + 1, default_1000_blocks[-1].header_hash ) diff --git a/chia/_tests/core/full_node/ram_db.py b/chia/_tests/core/full_node/ram_db.py index 114a992106fc..115c9f42c5e5 100644 --- a/chia/_tests/core/full_node/ram_db.py +++ b/chia/_tests/core/full_node/ram_db.py @@ -2,8 +2,8 @@ import contextlib import random +from collections.abc import AsyncIterator from pathlib import Path -from typing import AsyncIterator, Tuple from chia.consensus.blockchain import Blockchain from chia.consensus.constants import ConsensusConstants @@ -15,7 +15,7 @@ @contextlib.asynccontextmanager async def create_ram_blockchain( consensus_constants: ConsensusConstants, 
-) -> AsyncIterator[Tuple[DBWrapper2, Blockchain]]:
+) -> AsyncIterator[tuple[DBWrapper2, Blockchain]]:
     uri = f"file:db_{random.randint(0, 99999999)}?mode=memory&cache=shared"
     async with DBWrapper2.managed(database=uri, uri=True, reader_count=1, db_version=2) as db_wrapper:
         block_store = await BlockStore.create(db_wrapper)
diff --git a/chia/_tests/core/full_node/stores/test_block_store.py b/chia/_tests/core/full_node/stores/test_block_store.py
index d4cfb6ede6db..0fa0e4a1d2c1 100644
--- a/chia/_tests/core/full_node/stores/test_block_store.py
+++ b/chia/_tests/core/full_node/stores/test_block_store.py
@@ -5,7 +5,7 @@
 import random
 import sqlite3
 from pathlib import Path
-from typing import List, Optional, cast
+from typing import Optional, cast
 
 import pytest
 
@@ -153,7 +153,7 @@ async def test_deadlock(tmp_dir: Path, db_version: int, bt: BlockTools, use_cach
     for block in blocks:
         await _validate_and_add_block(bc, block)
         block_records.append(bc.block_record(block.header_hash))
-    tasks: List[asyncio.Task[object]] = []
+    tasks: list[asyncio.Task[object]] = []
 
     for i in range(10000):
         rand_i = random.randint(0, 9)
diff --git a/chia/_tests/core/full_node/stores/test_coin_store.py b/chia/_tests/core/full_node/stores/test_coin_store.py
index 04afddd97688..fb7e670243ca 100644
--- a/chia/_tests/core/full_node/stores/test_coin_store.py
+++ b/chia/_tests/core/full_node/stores/test_coin_store.py
@@ -3,7 +3,7 @@
 import logging
 from dataclasses import dataclass
 from pathlib import Path
-from typing import List, Optional, Set, Tuple
+from typing import Optional
 
 import pytest
 from clvm.casts import int_to_bytes
@@ -38,7 +38,7 @@
 log = logging.getLogger(__name__)
 
 
-def get_future_reward_coins(block: FullBlock) -> Tuple[Coin, Coin]:
+def get_future_reward_coins(block: FullBlock) -> tuple[Coin, Coin]:
     pool_amount = calculate_pool_reward(block.height)
     farmer_amount = calculate_base_farmer_reward(block.height)
     if block.is_transaction_block():
@@ -70,7 +70,7 @@ async def test_basic_coin_store(db_version: int, softfork_height: uint32, bt: Bl
         pool_reward_puzzle_hash=reward_ph,
     )
 
-    coins_to_spend: List[Coin] = []
+    coins_to_spend: list[Coin] = []
     for block in blocks:
         if block.is_transaction_block():
             for coin in block.get_included_reward_coins():
@@ -91,8 +91,8 @@ async def test_basic_coin_store(db_version: int, softfork_height: uint32, bt: Bl
             )
 
             # Adding blocks to the coin store
-            should_be_included_prev: Set[Coin] = set()
-            should_be_included: Set[Coin] = set()
+            should_be_included_prev: set[Coin] = set()
+            should_be_included: set[Coin] = set()
             for block in blocks:
                 farmer_coin, pool_coin = get_future_reward_coins(block)
                 should_be_included.add(farmer_coin)
@@ -178,8 +178,8 @@ async def test_set_spent(db_version: int, bt: BlockTools) -> None:
             # Save/get block
             for block in blocks:
                 if block.is_transaction_block():
-                    removals: List[bytes32] = []
-                    additions: List[Coin] = []
+                    removals: list[bytes32] = []
+                    additions: list[Coin] = []
                     async with db_wrapper.writer():
                         if block.is_transaction_block():
                             assert block.foliage_transaction_block is not None
@@ -229,8 +229,8 @@ async def test_num_unspent(bt: BlockTools, db_version: int) -> None:
                 if block.is_transaction_block():
                     assert block.foliage_transaction_block is not None
 
-                    removals: List[bytes32] = []
-                    additions: List[Coin] = []
+                    removals: list[bytes32] = []
+                    additions: list[Coin] = []
                     await coin_store.new_block(
                         block.height,
                         block.foliage_transaction_block.timestamp,
@@ -255,13 +255,13 @@ async def test_rollback(db_version: int, bt: BlockTools) -> None:
             coin_store = await CoinStore.create(db_wrapper)
 
             selected_coin: Optional[CoinRecord] = None
-            all_coins: List[Coin] = []
+            all_coins: list[Coin] = []
 
             for block in blocks:
                 all_coins += list(block.get_included_reward_coins())
                 if block.is_transaction_block():
-                    removals: List[bytes32] = []
-                    additions: List[Coin] = []
+                    removals: list[bytes32] = []
+                    additions: list[Coin] = []
                     assert block.foliage_transaction_block is not None
                     await coin_store.new_block(
                         block.height,
@@ -343,7 +343,7 @@ async def test_basic_reorg(tmp_dir: Path, db_version: int, bt: BlockTools) -> No
             store = await BlockStore.create(db_wrapper)
             b: Blockchain = await Blockchain.create(coin_store, store, bt.constants, tmp_dir, 2)
             try:
-                records: List[Optional[CoinRecord]] = []
+                records: list[Optional[CoinRecord]] = []
 
                 for block in blocks:
                     await _validate_and_add_block(b, block)
@@ -501,16 +501,16 @@ async def test_get_coin_states(db_version: int) -> None:
 
 @dataclass(frozen=True)
 class RandomCoinRecords:
-    items: List[CoinRecord]
-    puzzle_hashes: List[bytes32]
-    hints: List[Tuple[bytes32, bytes]]
+    items: list[CoinRecord]
+    puzzle_hashes: list[bytes32]
+    hints: list[tuple[bytes32, bytes]]
 
 
 @pytest.fixture(scope="session")
 def random_coin_records() -> RandomCoinRecords:
-    coin_records: List[CoinRecord] = []
-    puzzle_hashes: List[bytes32] = []
-    hints: List[Tuple[bytes32, bytes]] = []
+    coin_records: list[CoinRecord] = []
+    puzzle_hashes: list[bytes32] = []
+    hints: list[tuple[bytes32, bytes]] = []
 
     for i in range(50000):
         is_spent = i % 2 == 0
@@ -586,7 +586,7 @@ async def test_coin_state_batches(
             expected_crs.append(cr)
 
     height: Optional[uint32] = uint32(0)
-    all_coin_states: List[CoinState] = []
+    all_coin_states: list[CoinState] = []
     remaining_phs = random_coin_records.puzzle_hashes.copy()
 
     def height_of(coin_state: CoinState) -> int:
@@ -637,7 +637,7 @@ async def test_batch_many_coin_states(db_version: int, cut_off_middle: bool) ->
     ph = bytes32(b"0" * 32)
 
     # Generate coin records.
-    coin_records: List[CoinRecord] = []
+    coin_records: list[CoinRecord] = []
     count = 50000
 
     for i in range(count):
@@ -760,7 +760,7 @@ class UnspentLineageInfoTestItem:
 @dataclass
 class UnspentLineageInfoCase:
     id: str
-    items: List[UnspentLineageInfoTestItem]
+    items: list[UnspentLineageInfoTestItem]
     expected_success: bool
     parent_with_diff_amount: bool = False
     marks: Marks = ()
@@ -831,7 +831,7 @@ class UnspentLineageInfoCase:
     ),
 )
 async def test_get_unspent_lineage_info_for_puzzle_hash(case: UnspentLineageInfoCase) -> None:
-    CoinRecordRawData = Tuple[
+    CoinRecordRawData = tuple[
         bytes,  # coin_name (blob)
         int,  # confirmed_index (bigint)
         int,  # spent_index (bigint)
@@ -842,7 +842,7 @@ async def test_get_unspent_lineage_info_for_puzzle_hash(case: UnspentLineageInfo
         int,  # timestamp (bigint)
     ]
 
-    def make_test_data(test_items: List[UnspentLineageInfoTestItem]) -> List[CoinRecordRawData]:
+    def make_test_data(test_items: list[UnspentLineageInfoTestItem]) -> list[CoinRecordRawData]:
         test_data = []
         for item in test_items:
             test_data.append(
diff --git a/chia/_tests/core/full_node/stores/test_full_node_store.py b/chia/_tests/core/full_node/stores/test_full_node_store.py
index 7e0412f2a662..7ead0a57a4da 100644
--- a/chia/_tests/core/full_node/stores/test_full_node_store.py
+++ b/chia/_tests/core/full_node/stores/test_full_node_store.py
@@ -2,7 +2,8 @@
 
 import logging
 import random
-from typing import AsyncIterator, Dict, List, Optional, Tuple
+from collections.abc import AsyncIterator
+from typing import Optional
 
 import pytest
 
@@ -82,7 +83,7 @@ async def test_unfinished_block_rank(
     # create variants of the unfinished block, where all we do is to change
    # the foliage_transaction_block_hash. As if they all had different foliage,
     # but the same reward block hash (i.e. the same proof-of-space)
-    unfinished: List[UnfinishedBlock] = [
+    unfinished: list[UnfinishedBlock] = [
         recursive_replace(unf, "foliage.foliage_transaction_block_hash", bytes32([idx + 4] * 32))
         for idx in range(num_duplicates)
     ]
@@ -130,12 +131,12 @@
 )
 async def test_find_best_block(
     seeded_random: random.Random,
-    blocks: List[Tuple[Optional[int], bool]],
+    blocks: list[tuple[Optional[int], bool]],
     expected: Optional[int],
-    default_400_blocks: List[FullBlock],
+    default_400_blocks: list[FullBlock],
     bt: BlockTools,
 ) -> None:
-    result: Dict[Optional[bytes32], UnfinishedBlockEntry] = {}
+    result: dict[Optional[bytes32], UnfinishedBlockEntry] = {}
     i = 0
     for b, with_unf in blocks:
         unf: Optional[UnfinishedBlock]
@@ -958,7 +959,7 @@ async def test_basic_store(
             else:
                 case_1 = True
                 assert res.new_infusion_points == []
-        found_ips: List[timelord_protocol.NewInfusionPointVDF] = []
+        found_ips: list[timelord_protocol.NewInfusionPointVDF] = []
         peak = blockchain.get_peak()
 
         for ss in block.finished_sub_slots:
@@ -1015,7 +1016,7 @@ async def test_basic_store(
             and i1 > (i2 + 3)
         ):
             # We hit all the conditions that we want
-            all_sps: List[Optional[SignagePoint]] = [None] * custom_block_tools.constants.NUM_SPS_SUB_SLOT
+            all_sps: list[Optional[SignagePoint]] = [None] * custom_block_tools.constants.NUM_SPS_SUB_SLOT
 
             def assert_sp_none(sp_index: int, is_none: bool) -> None:
                 sp_to_check: Optional[SignagePoint] = all_sps[sp_index]
@@ -1126,7 +1127,7 @@ def assert_sp_none(sp_index: int, is_none: bool) -> None:
 @pytest.mark.anyio
 async def test_long_chain_slots(
     empty_blockchain_with_original_constants: Blockchain,
-    default_1000_blocks: List[FullBlock],
+    default_1000_blocks: list[FullBlock],
 ) -> None:
     blockchain = empty_blockchain_with_original_constants
     store = FullNodeStore(blockchain.constants)
diff --git a/chia/_tests/core/full_node/stores/test_hint_store.py b/chia/_tests/core/full_node/stores/test_hint_store.py
index aa130c0a8983..33d831975107 100644
--- a/chia/_tests/core/full_node/stores/test_hint_store.py
+++ b/chia/_tests/core/full_node/stores/test_hint_store.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import logging
-from typing import Tuple
 
 import pytest
 from clvm.casts import int_to_bytes
@@ -141,7 +140,7 @@ async def test_coin_ids_multi(db_version: int) -> None:
 
 @pytest.mark.anyio
 async def test_hints_in_blockchain(
-    wallet_nodes: Tuple[
+    wallet_nodes: tuple[
         FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, WalletTool, WalletTool, BlockTools
     ],
 ) -> None:
diff --git a/chia/_tests/core/full_node/test_conditions.py b/chia/_tests/core/full_node/test_conditions.py
index 72a7fc4af2e8..b6d8c44bb51e 100644
--- a/chia/_tests/core/full_node/test_conditions.py
+++ b/chia/_tests/core/full_node/test_conditions.py
@@ -6,7 +6,7 @@
 from __future__ import annotations
 
 import logging
-from typing import List, Optional, Tuple
+from typing import Optional
 
 import pytest
 from chia_rs import AugSchemeMPL, G2Element
@@ -46,7 +46,7 @@ def cleanup_keyring(keyring: TempKeyring) -> None:
 EASY_PUZZLE_HASH = EASY_PUZZLE.get_tree_hash()
 
 
-async def initial_blocks(bt: BlockTools, block_count: int = 4) -> List[FullBlock]:
+async def initial_blocks(bt: BlockTools, block_count: int = 4) -> list[FullBlock]:
     blocks = bt.get_consecutive_blocks(
         block_count,
         guarantee_transaction_block=True,
@@ -60,10 +60,10 @@
 async def check_spend_bundle_validity(
     bt: BlockTools,
-    blocks: List[FullBlock],
+    blocks: list[FullBlock],
     spend_bundle: SpendBundle,
     expected_err: Optional[Err] = None,
-) -> Tuple[List[CoinRecord], List[CoinRecord], FullBlock]:
+) -> tuple[list[CoinRecord], list[CoinRecord], FullBlock]:
     """
     This test helper create an extra block after the given blocks that contains the given
    `SpendBundle`, and then invokes `add_block` to ensure that it's accepted (if `expected_err=None`)
@@ -103,7 +103,7 @@ async def check_conditions(
     spend_reward_index: int = -2,
     *,
     aggsig: G2Element = G2Element(),
-) -> Tuple[List[CoinRecord], List[CoinRecord], FullBlock]:
+) -> tuple[list[CoinRecord], list[CoinRecord], FullBlock]:
     blocks = await initial_blocks(bt)
     coin = blocks[spend_reward_index].get_included_reward_coins()[0]
diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py
index 7440df15d28a..0d4616c58632 100644
--- a/chia/_tests/core/full_node/test_full_node.py
+++ b/chia/_tests/core/full_node/test_full_node.py
@@ -6,7 +6,8 @@
 import logging
 import random
 import time
-from typing import Coroutine, Dict, List, Optional, Tuple
+from collections.abc import Coroutine
+from typing import Optional
 
 import pytest
 from chia_rs import AugSchemeMPL, G2Element, PrivateKey
@@ -114,7 +115,7 @@ async def get_block_path(full_node: FullNodeAPI):
 @pytest.mark.anyio
 async def test_sync_no_farmer(
     setup_two_nodes_and_wallet,
-    default_1000_blocks: List[FullBlock],
+    default_1000_blocks: list[FullBlock],
     self_hostname: str,
     seeded_random: random.Random,
 ):
@@ -416,7 +417,7 @@ async def check_transaction_confirmed(transaction) -> bool:
         height = full_node_1.full_node.blockchain.get_peak().height
 
         blockchain = empty_blockchain
-        all_blocks: List[FullBlock] = await full_node_1.get_all_full_blocks()
+        all_blocks: list[FullBlock] = await full_node_1.get_all_full_blocks()
         assert height == len(all_blocks) - 1
 
         if test_reorgs:
@@ -833,7 +834,7 @@ async def test_new_transaction_and_mempool(self, wallet_nodes, self_hostname, se
         puzzle_hashes = []
 
         # Makes a bunch of coins
-        conditions_dict: Dict = {ConditionOpcode.CREATE_COIN: []}
+        conditions_dict: dict = {ConditionOpcode.CREATE_COIN: []}
         # This should fit in one transaction
         for _ in range(100):
             receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
@@ -1306,7 +1307,7 @@
         # best block we've already seen, so we may need to send more than 3
         # blocks to the node for it to forward 3
 
-        unf_blocks: List[UnfinishedBlock] = []
+        unf_blocks: list[UnfinishedBlock] = []
         last_reward_hash: Optional[bytes32] = None
 
         for idx in range(0, 6):
@@ -1475,7 +1476,7 @@ async def test_unfinished_block_with_replaced_generator(self, wallet_nodes, self
         else:
             reward_chain_block = block.reward_chain_block.get_unfinished()
 
-        generator_refs: List[uint32] = []
+        generator_refs: list[uint32] = []
         if committment > 6:
             generator_refs = [uint32(n) for n in range(600)]
 
@@ -1512,7 +1513,7 @@ async def test_double_blocks_same_pospace(self, wallet_nodes, self_hostname):
         for i in range(2):
             await full_node_1.farm_new_transaction_block(FarmNewBlockProtocol(ph))
 
-        blocks: List[FullBlock] = await full_node_1.get_all_full_blocks()
+        blocks: list[FullBlock] = await full_node_1.get_all_full_blocks()
         coin = blocks[-1].get_included_reward_coins()[0]
         tx = wallet_a.generate_signed_transaction(10000, wallet_receiver.get_new_puzzlehash(), coin)
 
@@ -2174,9 +2175,9 @@ async def coro(full_node, compact_proof):
     @pytest.mark.anyio
     async def test_invalid_capability_can_connect(
         self,
-        two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools],
+        two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools],
         self_hostname: str,
-        custom_capabilities: List[Tuple[uint16, str]],
+        custom_capabilities: list[tuple[uint16, str]],
         expect_success: bool,
     ) -> None:
         # TODO: consider not testing this against both DB v1 and v2?
@@ -2256,9 +2257,9 @@ async def test_wallet_sync_task_failure(
 async def test_long_reorg(
     light_blocks: bool,
     one_node_one_block,
-    default_10000_blocks: List[FullBlock],
-    test_long_reorg_1500_blocks: List[FullBlock],
-    test_long_reorg_1500_blocks_light: List[FullBlock],
+    default_10000_blocks: list[FullBlock],
+    test_long_reorg_1500_blocks: list[FullBlock],
+    test_long_reorg_1500_blocks_light: list[FullBlock],
     seeded_random: random.Random,
 ):
     node, server, bt = one_node_one_block
@@ -2345,11 +2346,11 @@ async def test_long_reorg_nodes(
     chain_length: int,
     fork_point: int,
     three_nodes,
-    default_10000_blocks: List[FullBlock],
-    test_long_reorg_blocks: List[FullBlock],
-    test_long_reorg_blocks_light: List[FullBlock],
-    test_long_reorg_1500_blocks: List[FullBlock],
-    test_long_reorg_1500_blocks_light: List[FullBlock],
+    default_10000_blocks: list[FullBlock],
+    test_long_reorg_blocks: list[FullBlock],
+    test_long_reorg_blocks_light: list[FullBlock],
+    test_long_reorg_1500_blocks: list[FullBlock],
+    test_long_reorg_1500_blocks_light: list[FullBlock],
     self_hostname: str,
     seeded_random: random.Random,
 ):
diff --git a/chia/_tests/core/full_node/test_generator_tools.py b/chia/_tests/core/full_node/test_generator_tools.py
index f3b303f2100e..c722a4a4db75 100644
--- a/chia/_tests/core/full_node/test_generator_tools.py
+++ b/chia/_tests/core/full_node/test_generator_tools.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import List
-
 from chia.types.blockchain_format.coin import Coin
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.spend_bundle_conditions import SpendBundleConditions, SpendConditions
@@ -12,7 +10,7 @@
 coin_ids = [std_hash(i.to_bytes(4, "big")) for i in range(10)]
 parent_ids = [std_hash(i.to_bytes(4, "big")) for i in range(10)]
 phs = [std_hash(i.to_bytes(4, "big")) for i in range(10)]
-spends: List[SpendConditions] = [
+spends: list[SpendConditions] = [
     SpendConditions(
         coin_ids[0],
         parent_ids[0],
diff --git a/chia/_tests/core/full_node/test_hint_management.py b/chia/_tests/core/full_node/test_hint_management.py
index 343e31568111..3536552bba28 100644
--- a/chia/_tests/core/full_node/test_hint_management.py
+++ b/chia/_tests/core/full_node/test_hint_management.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import List, Optional
+from typing import Optional
 
 import pytest
 
@@ -59,7 +59,7 @@ async def test_lookup_coin_ids(bt: BlockTools, empty_blockchain: Blockchain) ->
     br: Optional[BlockRecord] = empty_blockchain.get_peak()
     assert br is not None
-    rewards: List[Coin] = [
+    rewards: list[Coin] = [
         Coin(coin_ids[8], phs[8], uint64(1)),
         Coin(coin_ids[9], phs[9], uint64(2)),
         Coin(coin_ids[5], phs[8], uint64(1234)),
diff --git a/chia/_tests/core/full_node/test_performance.py b/chia/_tests/core/full_node/test_performance.py
index e3efee2f5bc0..bc6d6e53e6ed 100644
--- a/chia/_tests/core/full_node/test_performance.py
+++ b/chia/_tests/core/full_node/test_performance.py
@@ -4,7 +4,6 @@
 import dataclasses
 import logging
 import random
-from typing import Dict
 
 import pytest
 from clvm.casts import int_to_bytes
@@ -68,7 +67,7 @@ async def test_full_block_performance(
         # Makes a bunch of coins
         for i in range(20):
-            conditions_dict: Dict = {ConditionOpcode.CREATE_COIN: []}
+            conditions_dict: dict = {ConditionOpcode.CREATE_COIN: []}
             # This should fit in one transaction
             for _ in range(100):
                 receiver_puzzlehash = wallet_receiver.get_new_puzzlehash()
diff --git a/chia/_tests/core/full_node/test_tx_processing_queue.py b/chia/_tests/core/full_node/test_tx_processing_queue.py
index 265ac3a05299..228d5aed75e4 100644
--- a/chia/_tests/core/full_node/test_tx_processing_queue.py
+++ b/chia/_tests/core/full_node/test_tx_processing_queue.py
@@ -4,7 +4,7 @@
 import logging
 import random
 from dataclasses import dataclass
-from typing import List, Optional, cast
+from typing import Optional, cast
 
 import pytest
 
@@ -90,7 +90,7 @@ async def test_lots_of_peers(seeded_random: random.Random) -> None:
     num_peers = 1000
     num_txs = 100
     total_txs = num_txs * num_peers
-    peer_ids: List[bytes32] = [bytes32.random(seeded_random) for _ in range(num_peers)]
+    peer_ids: list[bytes32] = [bytes32.random(seeded_random) for _ in range(num_peers)]
 
     # 100 txs per peer
     list_txs = [get_transaction_queue_entry(peer_id, i) for peer_id in peer_ids for i in range(num_txs)]
@@ -112,7 +112,7 @@ async def test_full_queue(seeded_random: random.Random) -> None:
     num_peers = 100
     num_txs = 1000
     total_txs = num_txs * num_peers
-    peer_ids: List[bytes32] = [bytes32.random(seeded_random) for _ in range(num_peers)]
+    peer_ids: list[bytes32] = [bytes32.random(seeded_random) for _ in range(num_peers)]
 
     # 999 txs per peer then 1 to fail later
     list_txs = [get_transaction_queue_entry(peer_id, i) for peer_id in peer_ids for i in range(num_txs)]
diff --git a/chia/_tests/core/make_block_generator.py b/chia/_tests/core/make_block_generator.py
index c243014c4ea9..bad98496b2d9 100644
--- a/chia/_tests/core/make_block_generator.py
+++ b/chia/_tests/core/make_block_generator.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import Dict
-
 from chia_rs import G1Element, G2Element, PrivateKey
 
 from chia.full_node.bundle_tools import simple_solution_generator
@@ -25,7 +23,7 @@ def int_to_public_key(index: int) -> G1Element:
     return private_key_from_int.get_g1()
 
 
-def puzzle_hash_for_index(index: int, puzzle_hash_db: Dict[bytes32, SerializedProgram]) -> bytes32:
+def puzzle_hash_for_index(index: int, puzzle_hash_db: dict[bytes32, SerializedProgram]) -> bytes32:
     public_key: G1Element = int_to_public_key(index)
     puzzle = SerializedProgram.from_program(puzzle_for_pk(public_key))
     puzzle_hash: bytes32 = puzzle.get_tree_hash()
@@ -33,7 +31,7 @@ def puzzle_hash_for_index(index: int, puzzle_hash_db: Dict[bytes32, SerializedPr
     return puzzle_hash
 
 
-def make_fake_coin(index: int, puzzle_hash_db: Dict[bytes32, SerializedProgram]) -> Coin:
+def make_fake_coin(index: int, puzzle_hash_db: dict[bytes32, SerializedProgram]) -> Coin:
     """
     Make a fake coin with parent id equal to the index (ie.
    a genesis block coin)
@@ -45,14 +43,14 @@ def make_fake_coin(index: int, puzzle_hash_db: Dict[bytes32, SerializedProgram])
 
 
 def conditions_for_payment(coin: Coin) -> Program:
-    d: Dict[bytes32, SerializedProgram] = {}  # a throwaway db since we don't care
+    d: dict[bytes32, SerializedProgram] = {}  # a throwaway db since we don't care
     new_puzzle_hash = puzzle_hash_for_index(int.from_bytes(coin.puzzle_hash, "big"), d)
     ret: Program = Program.to([[ConditionOpcode.CREATE_COIN, new_puzzle_hash, coin.amount]])
     return ret
 
 
 def make_spend_bundle(count: int) -> SpendBundle:
-    puzzle_hash_db: Dict[bytes32, SerializedProgram] = {}
+    puzzle_hash_db: dict[bytes32, SerializedProgram] = {}
     coins = [make_fake_coin(_, puzzle_hash_db) for _ in range(count)]
 
     coin_spends = []
diff --git a/chia/_tests/core/mempool/test_mempool.py b/chia/_tests/core/mempool/test_mempool.py
index ebd89da77799..a6151d2fc260 100644
--- a/chia/_tests/core/mempool/test_mempool.py
+++ b/chia/_tests/core/mempool/test_mempool.py
@@ -3,7 +3,7 @@
 import dataclasses
 import logging
 import random
-from typing import Callable, Dict, List, Optional, Tuple
+from typing import Callable, Optional
 
 import pytest
 from chia_rs import G1Element, G2Element
@@ -88,7 +88,7 @@ def wallet_a(bt: BlockTools) -> WalletTool:
 def generate_test_spend_bundle(
     wallet: WalletTool,
     coin: Coin,
-    condition_dic: Optional[Dict[ConditionOpcode, List[ConditionWithArgs]]] = None,
+    condition_dic: Optional[dict[ConditionOpcode, list[ConditionWithArgs]]] = None,
     fee: uint64 = uint64(0),
     amount: uint64 = uint64(1000),
     new_puzzle_hash: bytes32 = BURN_PUZZLE_HASH,
@@ -305,7 +305,7 @@ def test_drain_height(self) -> None:
 class TestMempool:
     @pytest.mark.anyio
     async def test_basic_mempool(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -333,7 +333,7 @@ async def respond_transaction(
     peer: WSChiaConnection,
     tx_bytes: bytes = b"",
     test: bool = False,
-) -> Tuple[MempoolInclusionStatus, Optional[Err]]:
+) -> tuple[MempoolInclusionStatus, Optional[Err]]:
     """
     Receives a full transaction from peer. If tx is added to mempool, send tx_id to others.
    (new_transaction)
@@ -392,7 +392,7 @@ class TestMempoolManager:
     @pytest.mark.anyio
     async def test_basic_mempool_manager(
         self,
-        two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
+        two_nodes_one_block: tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
         wallet_a: WalletTool,
         self_hostname: str,
     ) -> None:
@@ -453,7 +453,7 @@ async def test_basic_mempool_manager(
     )
     async def test_ephemeral_timelock(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
         opcode: ConditionOpcode,
         lock_value: int,
@@ -493,7 +493,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
     # another spend, even though the assert condition is duplicated 100 times
     @pytest.mark.anyio
     async def test_coin_announcement_duplicate_consumed(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test")
@@ -519,7 +519,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
     # another spend, even though the create announcement is duplicated 100 times
     @pytest.mark.anyio
     async def test_coin_duplicate_announcement_consumed(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test")
@@ -544,7 +544,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
     @pytest.mark.anyio
     async def test_double_spend(
         self,
-        two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
+        two_nodes_one_block: tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
         wallet_a: WalletTool,
        self_hostname: str,
     ) -> None:
@@ -592,7 +592,7 @@ async def test_double_spend(
 
     @pytest.mark.anyio
     async def test_double_spend_with_higher_fee(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, _, bt = one_node_one_block
         blocks = await full_node_1.get_all_full_blocks()
@@ -675,7 +675,7 @@ async def test_double_spend_with_higher_fee(
 
     @pytest.mark.anyio
     async def test_invalid_signature(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         reward_ph = wallet_a.get_new_puzzlehash()
 
@@ -709,13 +709,13 @@ async def test_invalid_signature(
 
     async def condition_tester(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
-        dic: Dict[ConditionOpcode, List[ConditionWithArgs]],
+        dic: dict[ConditionOpcode, list[ConditionWithArgs]],
         fee: int = 0,
         num_blocks: int = 3,
         coin: Optional[Coin] = None,
-    ) -> Tuple[List[FullBlock], SpendBundle, WSChiaConnection, MempoolInclusionStatus, Optional[Err]]:
+    ) -> tuple[list[FullBlock], SpendBundle, WSChiaConnection, MempoolInclusionStatus, Optional[Err]]:
         reward_ph = wallet_a.get_new_puzzlehash()
         full_node_1, server_1, bt = one_node_one_block
         blocks = await full_node_1.get_all_full_blocks()
@@ -754,10 +754,10 @@ async def condition_tester(
     @pytest.mark.anyio
     async def condition_tester2(
         self,
-        node_server_bt: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        node_server_bt: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
         test_fun: Callable[[Coin, Coin], SpendBundle],
-    ) -> Tuple[List[FullBlock], SpendBundle, MempoolInclusionStatus, Optional[Err]]:
+    ) -> tuple[list[FullBlock], SpendBundle, MempoolInclusionStatus, Optional[Err]]:
         reward_ph = wallet_a.get_new_puzzlehash()
         full_node_1, server_1, bt = node_server_bt
         blocks = await full_node_1.get_all_full_blocks()
@@ -795,7 +795,7 @@ async def condition_tester2(
 
     @pytest.mark.anyio
     async def test_invalid_block_index(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         blocks = await full_node_1.get_all_full_blocks()
@@ -814,7 +814,7 @@ async def test_invalid_block_index(
 
     @pytest.mark.anyio
     async def test_block_index_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [])
@@ -828,7 +828,7 @@ async def test_block_index_missing_arg(
 
     @pytest.mark.anyio
     async def test_correct_block_index(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(1)])
@@ -841,7 +841,7 @@ async def test_correct_block_index(
 
     @pytest.mark.anyio
     async def test_block_index_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         # garbage at the end of the argument list is ignored in consensus mode,
@@ -856,7 +856,7 @@ async def test_block_index_garbage(
 
     @pytest.mark.anyio
     async def test_negative_block_index(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE, [int_to_bytes(-1)])
@@ -869,7 +869,7 @@ async def test_negative_block_index(
 
     @pytest.mark.anyio
     async def test_invalid_block_age(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(5)])
@@ -883,7 +883,7 @@
     @pytest.mark.anyio
     async def test_block_age_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [])
@@ -897,7 +897,7 @@ async def test_block_age_missing_arg(
 
     @pytest.mark.anyio
     async def test_correct_block_age(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(1)])
@@ -913,7 +913,7 @@ async def test_correct_block_age(
 
     @pytest.mark.anyio
     async def test_block_age_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         # garbage at the end of the argument list is ignored in consensus mode,
@@ -931,7 +931,7 @@ async def test_block_age_garbage(
 
     @pytest.mark.anyio
     async def test_negative_block_age(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_HEIGHT_RELATIVE, [int_to_bytes(-1)])
@@ -947,7 +947,7 @@ async def test_negative_block_age(
 
     @pytest.mark.anyio
     async def test_correct_my_id(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -967,7 +967,7 @@ async def test_correct_my_id(
 
     @pytest.mark.anyio
     async def test_my_id_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -989,7 +989,7 @@ async def test_my_id_garbage(
 
     @pytest.mark.anyio
     async def test_invalid_my_id(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -1010,7 +1010,7 @@ async def test_invalid_my_id(
 
     @pytest.mark.anyio
     async def test_my_id_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.ASSERT_MY_COIN_ID, [])
@@ -1024,7 +1024,7 @@ async def test_my_id_missing_arg(
 
     @pytest.mark.anyio
     async def test_assert_time_exceeds(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, _, _ = one_node_one_block
         blockchain_peak = full_node_1.full_node.blockchain.get_peak()
@@ -1043,7 +1043,7 @@ async def test_assert_time_exceeds(
 
     @pytest.mark.anyio
     async def test_assert_time_fail(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, _, _ = one_node_one_block
         blockchain_peak = full_node_1.full_node.blockchain.get_peak()
@@ -1061,7 +1061,7 @@ async def test_assert_time_fail(
 
     @pytest.mark.anyio
     async def test_assert_height_pending(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, _, _ = one_node_one_block
         blockchain_peak = full_node_1.full_node.blockchain.get_peak()
@@ -1078,7 +1078,7 @@ async def test_assert_height_pending(
 
     @pytest.mark.anyio
     async def test_assert_time_negative(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         time_now = -1
@@ -1093,7 +1093,7 @@ async def test_assert_time_negative(
 
     @pytest.mark.anyio
     async def test_assert_time_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -1107,7 +1107,7 @@ async def test_assert_time_missing_arg(
 
     @pytest.mark.anyio
     async def test_assert_time_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, _, _ = one_node_one_block
         blockchain_peak = full_node_1.full_node.blockchain.get_peak()
@@ -1127,7 +1127,7 @@ async def test_assert_time_garbage(
 
     @pytest.mark.anyio
     async def test_assert_time_relative_exceeds(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         time_relative = 3
@@ -1155,7 +1155,7 @@ async def test_assert_time_relative_exceeds(
 
     @pytest.mark.anyio
     async def test_assert_time_relative_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         time_relative = 0
@@ -1173,7 +1173,7 @@ async def test_assert_time_relative_garbage(
 
     @pytest.mark.anyio
     async def test_assert_time_relative_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -1188,7 +1188,7 @@ async def test_assert_time_relative_missing_arg(
 
     @pytest.mark.anyio
     async def test_assert_time_relative_negative(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         time_relative = -3
@@ -1205,7 +1205,7 @@ async def test_assert_time_relative_negative(
 
     # ensure one spend can assert a coin announcement from another spend
     @pytest.mark.anyio
     async def test_correct_coin_announcement_consumed(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test")
@@ -1244,7 +1244,7 @@ async def test_coin_announcement_garbage(
         announce_garbage: bool,
         expected: Optional[Err],
         expected_included: MempoolInclusionStatus,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
@@ -1279,7 +1279,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_coin_announcement_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             # missing arg here
@@ -1301,7 +1301,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_coin_announcement_missing_arg2(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test")
@@ -1324,7 +1324,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_coin_announcement_too_big(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=bytes([1] * 10000))
@@ -1360,7 +1360,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
     # create announcement
     @pytest.mark.anyio
     async def test_invalid_coin_announcement_rejected(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_2.name(), asserted_msg=b"test")
@@ -1390,7 +1390,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_invalid_coin_announcement_rejected_two(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertCoinAnnouncement(asserted_id=coin_1.name(), asserted_msg=b"test")
@@ -1417,7 +1417,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_correct_puzzle_announcement(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes(0x80))
@@ -1457,7 +1457,7 @@ async def test_puzzle_announcement_garbage(
         announce_garbage: bool,
         expected: Optional[Err],
         expected_included: MempoolInclusionStatus,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
@@ -1492,7 +1492,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_puzzle_announcement_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             # missing arg here
@@ -1519,7 +1519,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_puzzle_announcement_missing_arg2(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=b"test")
@@ -1547,7 +1547,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_invalid_puzzle_announcement_rejected(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes("test", "utf-8"))
@@ -1577,7 +1577,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_invalid_puzzle_announcement_rejected_two(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
             announce = AssertPuzzleAnnouncement(asserted_ph=coin_2.puzzle_hash, asserted_msg=bytes(0x80))
@@ -1607,7 +1607,7 @@ def test_fun(coin_1: Coin, coin_2: Coin) -> SpendBundle:
 
     @pytest.mark.anyio
     async def test_assert_fee_condition(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(10)])
@@ -1623,7 +1623,7 @@ async def test_assert_fee_condition(
 
     @pytest.mark.anyio
     async def test_assert_fee_condition_garbage(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         # garbage at the end of the arguments is ignored in consensus mode, but
@@ -1641,7 +1641,7 @@ async def test_assert_fee_condition_garbage(
 
     @pytest.mark.anyio
     async def test_assert_fee_condition_missing_arg(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [])
@@ -1657,7 +1657,7 @@ async def test_assert_fee_condition_missing_arg(
 
     @pytest.mark.anyio
     async def test_assert_fee_condition_negative_fee(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(-1)])
@@ -1677,7 +1677,7 @@ async def test_assert_fee_condition_negative_fee(
 
     @pytest.mark.anyio
     async def test_assert_fee_condition_fee_too_large(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
         cvp = ConditionWithArgs(ConditionOpcode.RESERVE_FEE, [int_to_bytes(2**64)])
@@ -1697,7 +1697,7 @@ async def test_assert_fee_condition_fee_too_large(
 
     @pytest.mark.anyio
     async def test_assert_fee_condition_wrong_fee(
-        self, one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+        self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
 
@@ -1713,7 +1713,7 @@ async def test_assert_fee_condition_wrong_fee(
     @pytest.mark.anyio
     async def test_stealing_fee(
         self,
-        two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
+        two_nodes_one_block: tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         reward_ph = wallet_a.get_new_puzzlehash()
@@ -1774,7 +1774,7 @@ async def test_stealing_fee(
     @pytest.mark.anyio
     async def test_double_spend_same_bundle(
         self,
-        two_nodes_one_block: Tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
+        two_nodes_one_block: tuple[FullNodeSimulator, FullNodeSimulator, ChiaServer, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         reward_ph = wallet_a.get_new_puzzlehash()
@@ -1823,7 +1823,7 @@ async def test_double_spend_same_bundle(
     @pytest.mark.anyio
     async def test_agg_sig_condition(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         reward_ph = wallet_a.get_new_puzzlehash()
@@ -1846,7 +1846,7 @@ async def test_agg_sig_condition(
         coin = await next_block(full_node_1, wallet_a, bt)
         # coin = blocks[-1].get_included_reward_coins()[0]
         spend_bundle_0 = generate_test_spend_bundle(wallet_a, coin)
-        unsigned: List[CoinSpend] = spend_bundle_0.coin_spends
+        unsigned: list[CoinSpend] = spend_bundle_0.coin_spends
 
         assert len(unsigned) == 1
         # coin_spend: CoinSpend = unsigned[0]
@@ -1872,7 +1872,7 @@ async def test_agg_sig_condition(
     @pytest.mark.anyio
     async def test_correct_my_parent(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -1895,7 +1895,7 @@ async def test_correct_my_parent(
     @pytest.mark.anyio
     async def test_my_parent_garbage(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -1920,7 +1920,7 @@ async def test_my_parent_garbage(
     @pytest.mark.anyio
     async def test_my_parent_missing_arg(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -1937,7 +1937,7 @@ async def test_my_parent_missing_arg(
     @pytest.mark.anyio
     async def test_invalid_my_parent(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -1961,7 +1961,7 @@ async def test_invalid_my_parent(
     @pytest.mark.anyio
     async def test_correct_my_puzhash(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -1984,7 +1984,7 @@ async def test_correct_my_puzhash(
     @pytest.mark.anyio
     async def test_my_puzhash_garbage(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2008,7 +2008,7 @@ async def test_my_puzhash_garbage(
     @pytest.mark.anyio
     async def test_my_puzhash_missing_arg(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2025,7 +2025,7 @@ async def test_my_puzhash_missing_arg(
     @pytest.mark.anyio
     async def test_invalid_my_puzhash(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2048,7 +2048,7 @@ async def test_invalid_my_puzhash(
     @pytest.mark.anyio
     async def test_correct_my_amount(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2071,7 +2071,7 @@ async def test_correct_my_amount(
     @pytest.mark.anyio
     async def test_my_amount_garbage(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2096,7 +2096,7 @@ async def test_my_amount_garbage(
     @pytest.mark.anyio
     async def test_my_amount_missing_arg(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2113,7 +2113,7 @@ async def test_my_amount_missing_arg(
     @pytest.mark.anyio
     async def test_invalid_my_amount(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2130,7 +2130,7 @@ async def test_invalid_my_amount(
     @pytest.mark.anyio
     async def test_negative_my_amount(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, server_1, bt = one_node_one_block
@@ -2147,7 +2147,7 @@ async def test_negative_my_amount(
     @pytest.mark.anyio
     async def test_my_amount_too_large(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, _, _ = one_node_one_block
@@ -2832,7 +2832,7 @@ def test_many_create_coin(self, softfork_height: uint32, benchmark_runner: Bench
     @pytest.mark.anyio
     async def test_invalid_coin_spend_coin(
         self,
-        one_node_one_block: Tuple[FullNodeSimulator, ChiaServer, BlockTools],
+        one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
         wallet_a: WalletTool,
     ) -> None:
         full_node_1, _, bt = one_node_one_block
@@ -2898,7 +2898,7 @@ async def test_invalid_coin_spend_coin(
         ),
     ],
 )
-def test_items_by_feerate(items: List[MempoolItem], expected: List[Coin]) -> None:
+def test_items_by_feerate(items: list[MempoolItem], expected: list[Coin]) -> None:
     fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
 
     mempool_info = MempoolInfo(
@@ -2951,7 +2951,7 @@ def item_cost(cost: int, fee_rate: float) -> MempoolItem:
         ([75, 15, 9], 10, [10, 75, 15]),
     ],
 )
-def test_full_mempool(items: List[int], add: int, expected: List[int]) -> None:
+def test_full_mempool(items: list[int], add: int, expected: list[int]) -> None:
     fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
 
     mempool_info = MempoolInfo(
@@ -2998,7 +2998,7 @@ def test_full_mempool(items: List[int], add: int, expected: List[int]) -> None:
         ([10, 11, 12, 13, 50], [10, 11, 12, 13], False),
     ],
 )
-def test_limit_expiring_transactions(height: bool, items: List[int], expected: List[int], increase_fee: bool) -> None:
+def test_limit_expiring_transactions(height: bool, items: list[int], expected: list[int], increase_fee: bool) -> None:
     fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
 
     mempool_info = MempoolInfo(
@@ -3077,7 +3077,7 @@ def test_limit_expiring_transactions(height: bool, items: List[int], expected: L
         ),
     ],
 )
-def test_get_items_by_coin_ids(items: List[MempoolItem], coin_ids: List[bytes32], expected: List[MempoolItem]) -> None:
+def test_get_items_by_coin_ids(items: list[MempoolItem], coin_ids: list[bytes32], expected: list[MempoolItem]) -> None:
     fee_estimator = create_bitcoin_fee_estimator(uint64(11000000000))
     mempool_info = MempoolInfo(
         CLVMCost(uint64(11000000000 * 3)),
@@ -3110,7 +3110,7 @@ def make_test_spendbundle(coin: Coin, *, fee: int = 0, with_higher_cost: bool =
         sb = spend_bundle_from_conditions(conditions, coin)
         return sb
 
-    def agg_and_add_sb_returning_cost_info(mempool: Mempool, spend_bundles: List[SpendBundle]) -> uint64:
+    def agg_and_add_sb_returning_cost_info(mempool: Mempool, spend_bundles: list[SpendBundle]) -> uint64:
         sb = SpendBundle.aggregate(spend_bundles)
         mi = mempool_item_from_spendbundle(sb)
         mempool.add_to_pool(mi)
diff --git a/chia/_tests/core/mempool/test_mempool_fee_protocol.py b/chia/_tests/core/mempool/test_mempool_fee_protocol.py
index 5742699cb70c..8704afc98b12 100644
--- a/chia/_tests/core/mempool/test_mempool_fee_protocol.py
+++ b/chia/_tests/core/mempool/test_mempool_fee_protocol.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 import datetime
-from typing import List, Tuple, Union
+from typing import Union
 
 import pytest
 
@@ -20,8 +20,8 @@
 @pytest.mark.anyio
 async def test_protocol_messages(
-    simulator_and_wallet: Tuple[
-        List[Union[FullNodeAPI, FullNodeSimulator]], List[Tuple[Wallet, ChiaServer]], BlockTools
+    simulator_and_wallet: tuple[
+        list[Union[FullNodeAPI, FullNodeSimulator]], list[tuple[Wallet, ChiaServer]], BlockTools
     ]
 ) -> None:
     full_nodes, wallets, bt = simulator_and_wallet
diff --git a/chia/_tests/core/mempool/test_mempool_item_queries.py b/chia/_tests/core/mempool/test_mempool_item_queries.py
index 898c9453bdbe..8560473e9b03 100644
--- a/chia/_tests/core/mempool/test_mempool_item_queries.py
+++ b/chia/_tests/core/mempool/test_mempool_item_queries.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import List
-
 from chia_rs import AugSchemeMPL, Coin, Program
 from chia_rs.sized_bytes import bytes32
 from chia_rs.sized_ints import uint32, uint64
@@ -43,7 +41,7 @@
 EMPTY_SIGNATURE = AugSchemeMPL.aggregate([])
 
 
-def make_item(coin_spends: List[CoinSpend]) -> MempoolItem:
+def make_item(coin_spends: list[CoinSpend]) -> MempoolItem:
     spend_bundle = SpendBundle(coin_spends, EMPTY_SIGNATURE)
     generator = simple_solution_generator(spend_bundle)
     npc_result = get_name_puzzle_conditions(
diff --git a/chia/_tests/core/mempool/test_mempool_manager.py b/chia/_tests/core/mempool/test_mempool_manager.py
index 8e0796202aad..0db312df78b1 100644
--- a/chia/_tests/core/mempool/test_mempool_manager.py
+++ b/chia/_tests/core/mempool/test_mempool_manager.py
@@ -2,7 +2,8 @@
 
 import dataclasses
 import logging
-from typing import Any, Awaitable, Callable, Collection, Dict, List, Optional, Set, Tuple
+from collections.abc import Awaitable, Collection
+from typing import Any, Callable, Optional
 
 import pytest
 from chia_rs import ELIGIBLE_FOR_DEDUP, ELIGIBLE_FOR_FF, AugSchemeMPL, G2Element, get_conditions_from_spendbundle
@@ -96,19 +97,19 @@ def is_transaction_block(self) -> bool:
         return self.timestamp is not None
 
 
-async def zero_calls_get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
+async def zero_calls_get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
     assert len(coin_ids) == 0
     return []
 
 
-async def get_coin_records_for_test_coins(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
+async def get_coin_records_for_test_coins(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
     test_coin_records = {
         TEST_COIN_ID: TEST_COIN_RECORD,
         TEST_COIN_ID2: TEST_COIN_RECORD2,
         TEST_COIN_ID3: TEST_COIN_RECORD3,
     }
 
-    ret: List[CoinRecord] = []
+    ret: list[CoinRecord] = []
     for name in coin_ids:
         r = test_coin_records.get(name)
         if r is not None:
@@ -131,7 +132,7 @@ def create_test_block_record(*, height: uint32 = TEST_HEIGHT, timestamp: uint64
 
 async def instantiate_mempool_manager(
-    get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]],
+    get_coin_records: Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]],
     *,
     block_height: uint32 = TEST_HEIGHT,
     block_timestamp: uint64 = TEST_TIMESTAMP,
@@ -147,11 +148,11 @@ async def instantiate_mempool_manager(
 
 async def setup_mempool_with_coins(
     *,
-    coin_amounts: List[int],
+    coin_amounts: list[int],
     max_block_clvm_cost: Optional[int] = None,
     max_tx_clvm_cost: Optional[uint64] = None,
     mempool_block_buffer: Optional[int] = None,
-) -> Tuple[MempoolManager, List[Coin]]:
+) -> tuple[MempoolManager, list[Coin]]:
     coins = []
     test_coin_records = {}
     for amount in coin_amounts:
@@ -159,8 +160,8 @@ async def setup_mempool_with_coins(
         coins.append(coin)
         test_coin_records[coin.name()] = CoinRecord(coin, uint32(0), uint32(0), False, uint64(0))
 
-    async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
-        ret: List[CoinRecord] = []
+    async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
+        ret: list[CoinRecord] = []
         for name in coin_ids:
             r = test_coin_records.get(name)
             if r is not None:
@@ -191,7 +192,7 @@ def make_test_conds(
     before_seconds_relative: Optional[int] = None,
     before_seconds_absolute: Optional[int] = None,
     cost: int = 0,
-    spend_ids: List[bytes32] = [TEST_COIN_ID],
+    spend_ids: list[bytes32] = [TEST_COIN_ID],
 ) -> SpendBundleConditions:
     return SpendBundleConditions(
         [
@@ -244,7 +245,7 @@ class TestCheckTimeLocks:
         coinbase=False,
         timestamp=COIN_TIMESTAMP,
     )
-    REMOVALS: Dict[bytes32, CoinRecord] = {TEST_COIN.name(): COIN_RECORD}
+    REMOVALS: dict[bytes32, CoinRecord] = {TEST_COIN.name(): COIN_RECORD}
 
     @pytest.mark.parametrize(
         "conds,expected",
@@ -379,7 +380,7 @@ def test_compute_assert_height(conds: SpendBundleConditions, expected: TimelockC
 
 def spend_bundle_from_conditions(
-    conditions: List[List[Any]], coin: Coin = TEST_COIN, aggsig: G2Element = G2Element()
+    conditions: list[list[Any]], coin: Coin = TEST_COIN, aggsig: G2Element = G2Element()
 ) -> SpendBundle:
     solution = SerializedProgram.to(conditions)
     coin_spend = make_spend(coin, IDENTITY_PUZZLE, solution)
@@ -388,7 +389,7 @@ def spend_bundle_from_conditions(
 
 async def add_spendbundle(
     mempool_manager: MempoolManager, sb: SpendBundle, sb_name: bytes32
-) -> Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
+) -> tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]:
     sbc = await mempool_manager.pre_validate_spendbundle(sb, sb_name)
     ret = await mempool_manager.add_spend_bundle(sb, sbc, sb_name, TEST_HEIGHT)
     invariant_check_mempool(mempool_manager.mempool)
@@ -397,10 +398,10 @@ async def add_spendbundle(
 
 async def generate_and_add_spendbundle(
     mempool_manager: MempoolManager,
-    conditions: List[List[Any]],
+    conditions: list[list[Any]],
     coin: Coin = TEST_COIN,
     aggsig: G2Element = G2Element(),
-) -> Tuple[SpendBundle, bytes32, Tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]]:
+) -> tuple[SpendBundle, bytes32, tuple[Optional[uint64], MempoolInclusionStatus, Optional[Err]]]:
     sb = spend_bundle_from_conditions(conditions, coin, aggsig)
     sb_name = sb.name()
     result = await add_spendbundle(mempool_manager, sb, sb_name)
@@ -409,9 +410,9 @@ async def generate_and_add_spendbundle(
 
 def make_bundle_spends_map_and_fee(
     spend_bundle: SpendBundle, conds: SpendBundleConditions
-) -> Tuple[Dict[bytes32, BundleCoinSpend], uint64]:
-    bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = {}
-    eligibility_and_additions: Dict[bytes32, EligibilityAndAdditions] = {}
+) -> tuple[dict[bytes32, BundleCoinSpend], uint64]:
+    bundle_coin_spends: dict[bytes32, BundleCoinSpend] = {}
+    eligibility_and_additions: dict[bytes32, EligibilityAndAdditions] = {}
     removals_amount = 0
     additions_amount = 0
     for spend in conds.spends:
@@ -551,7 +552,7 @@ async def test_reserve_fee_condition() -> None:
 
 @pytest.mark.anyio
 async def test_unknown_unspent() -> None:
-    async def get_coin_records(_: Collection[bytes32]) -> List[CoinRecord]:
+    async def get_coin_records(_: Collection[bytes32]) -> list[CoinRecord]:
         return []
 
     mempool_manager = await instantiate_mempool_manager(get_coin_records)
@@ -598,7 +599,7 @@ async def test_sb_twice_with_eligible_coin_and_different_spends_order() -> None:
     sk = AugSchemeMPL.key_gen(b"6" * 32)
     g1 = sk.get_g1()
     sig = AugSchemeMPL.sign(sk, IDENTITY_PUZZLE_HASH, g1)
-    sb2_conditions: List[List[Any]] = [
+    sb2_conditions: list[list[Any]] = [
         [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 3],
         [ConditionOpcode.AGG_SIG_UNSAFE, bytes(g1), IDENTITY_PUZZLE_HASH],
     ]
@@ -726,7 +727,7 @@ def test_optional_max() -> None:
 
 def mk_item(
-    coins: List[Coin],
+    coins: list[Coin],
     *,
     cost: int = 1,
     fee: int = 0,
@@ -762,8 +763,8 @@ def mk_item(
     )
 
 
-def make_test_coins() -> List[Coin]:
-    ret: List[Coin] = []
+def make_test_coins() -> list[Coin]:
+    ret: list[Coin] = []
     for i in range(5):
         ret.append(Coin(height_hash(i), height_hash(i + 100), uint64(i * 100)))
     return ret
@@ -904,7 +905,7 @@ def make_test_coins() -> List[Coin]:
         ),
     ],
 )
-def test_can_replace(existing_items: List[MempoolItem], new_item: MempoolItem, expected: bool) -> None:
+def test_can_replace(existing_items: list[MempoolItem], new_item: MempoolItem, expected: bool) -> None:
     removals = {c.name() for c in new_item.spend_bundle.removals()}
     assert can_replace(existing_items, removals, new_item) == expected
 
@@ -956,10 +957,10 @@ async def test_get_items_not_in_filter() -> None:
 
 @pytest.mark.anyio
 async def test_total_mempool_fees() -> None:
-    coin_records: Dict[bytes32, CoinRecord] = {}
+    coin_records: dict[bytes32, CoinRecord] = {}
 
-    async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
-        ret: List[CoinRecord] = []
+    async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
+        ret: list[CoinRecord] = []
         for name in coin_ids:
             r = coin_records.get(name)
             if r is not None:
@@ -997,7 +998,7 @@ async def test_create_bundle_from_mempool(reverse_tx_order: bool) -> None:
     async def get_unspent_lineage_info_for_puzzle_hash(_: bytes32) -> Optional[UnspentLineageInfo]:
         assert False  # pragma: no cover
 
-    async def make_coin_spends(coins: List[Coin], *, high_fees: bool = True) -> List[CoinSpend]:
+    async def make_coin_spends(coins: list[Coin], *, high_fees: bool = True) -> list[CoinSpend]:
         spends_list = []
         for i in range(0, len(coins)):
             coin_spend = make_spend(
@@ -1010,7 +1011,7 @@ async def make_coin_spends(coins: List[Coin], *, high_fees: bool = True) -> List
             spends_list.append(coin_spend)
         return spends_list
 
-    async def send_spends_to_mempool(coin_spends: List[CoinSpend]) -> None:
+    async def send_spends_to_mempool(coin_spends: list[CoinSpend]) -> None:
         g2 = G2Element()
         for cs in coin_spends:
             sb = SpendBundle([cs], g2)
@@ -1149,9 +1150,9 @@ async def make_and_send_big_cost_sb(coin: Coin) -> None:
 async def test_assert_before_expiration(
     opcode: ConditionOpcode, arg: int, expect_eviction: bool, expect_limit: Optional[int]
 ) -> None:
-    async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
+    async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
         all_coins = {TEST_COIN.name(): CoinRecord(TEST_COIN, uint32(5), uint32(0), False, uint64(9900))}
-        ret: List[CoinRecord] = []
+        ret: list[CoinRecord] = []
         for name in coin_ids:
             r = all_coins.get(name)
             if r is not None:
@@ -1209,7 +1210,7 @@ def make_test_spendbundle(coin: Coin, *, fee: int = 0, eligible_spend: bool = Fa
 async def send_spendbundle(
     mempool_manager: MempoolManager,
     sb: SpendBundle,
-    expected_result: Tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
+    expected_result: tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
 ) -> None:
     result = await add_spendbundle(mempool_manager, sb, sb.name())
     assert (result[1], result[2]) == expected_result
@@ -1220,7 +1221,7 @@ async def make_and_send_spendbundle(
     coin: Coin,
     *,
     fee: int = 0,
-    expected_result: Tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
+    expected_result: tuple[MempoolInclusionStatus, Optional[Err]] = (MempoolInclusionStatus.SUCCESS, None),
 ) -> SpendBundle:
     sb = make_test_spendbundle(coin, fee=fee)
     await send_spendbundle(mempool_manager, sb, expected_result)
@@ -1529,8 +1530,8 @@ async def test_coin_spending_different_ways_then_finding_it_spent_in_new_peak(ne
     coin_id = coin.name()
     test_coin_records = {coin_id: CoinRecord(coin, uint32(0), uint32(0), False, uint64(0))}
 
-    async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
-        ret: List[CoinRecord] = []
+    async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
+        ret: list[CoinRecord] = []
         for name in coin_ids:
             r = test_coin_records.get(name)
             if r is not None:
@@ -1592,7 +1593,7 @@ async def test_identical_spend_aggregation_e2e(
     def get_sb_names_by_coin_id(
         full_node_api: FullNodeSimulator,
         spent_coin_id: bytes32,
-    ) -> Set[bytes32]:
+    ) -> set[bytes32]:
         return {
             i.spend_bundle_name
             for i in full_node_api.full_node.mempool_manager.mempool.get_items_by_coin_id(spent_coin_id)
@@ -1616,7 +1617,7 @@ async def farm_a_block(full_node_api: FullNodeSimulator, wallet_node: WalletNode
 
     async def make_setup_and_coins(
         full_node_api: FullNodeSimulator, wallet_node: WalletNode
-    ) -> Tuple[Wallet, list[WalletCoinRecord], bytes32]:
+    ) -> tuple[Wallet, list[WalletCoinRecord], bytes32]:
         wallet = wallet_node.wallet_state_manager.main_wallet
         ph = await wallet.get_new_puzzlehash()
         phs = [await wallet.get_new_puzzlehash() for _ in range(3)]
@@ -1712,7 +1713,7 @@ async def make_setup_and_coins(
     assert tx_f.spend_bundle is not None
     # Create transaction E now that spends e_coin to create another eligible
     # coin as well as the announcement consumed by D and F
-    conditions: List[List[Any]] = [
+    conditions: list[list[Any]] = [
         [ConditionOpcode.CREATE_COIN, IDENTITY_PUZZLE_HASH, 42],
         [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, message],
     ]
@@ -1898,7 +1899,7 @@ async def make_setup_and_coins(
         ),
     ],
 )
-async def test_mempool_timelocks(cond1: List[object], cond2: List[object], expected: Optional[Err]) -> None:
+async def test_mempool_timelocks(cond1: list[object], cond2: list[object], expected: Optional[Err]) -> None:
     coins = []
     test_coin_records = {}
 
@@ -1909,8 +1910,8 @@
         coins.append(coin)
         test_coin_records[coin.name()] = CoinRecord(coin, uint32(20), uint32(0), False, uint64(2000))
 
-    async def get_coin_records(coin_ids: Collection[bytes32]) -> List[CoinRecord]:
-        ret: List[CoinRecord] = []
+    async def get_coin_records(coin_ids: Collection[bytes32]) -> list[CoinRecord]:
+        ret: list[CoinRecord] = []
         for name in coin_ids:
             r = test_coin_records.get(name)
             if r is not None:
@@ -1987,7 +1988,7 @@
async def send_to_mempool(full_node: FullNodeSimulator, spend_bundle: SpendBundl async def fill_mempool_with_test_sbs( full_node_api: FullNodeSimulator, - ) -> List[Tuple[bytes32, SerializedProgram, bytes32]]: + ) -> list[tuple[bytes32, SerializedProgram, bytes32]]: coins_and_puzzles = [] # Create different puzzles and use different (parent) coins to reduce # the effects of block compression as much as possible. diff --git a/chia/_tests/core/mempool/test_mempool_performance.py b/chia/_tests/core/mempool/test_mempool_performance.py index 31e879818059..54e3515780bf 100644 --- a/chia/_tests/core/mempool/test_mempool_performance.py +++ b/chia/_tests/core/mempool/test_mempool_performance.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chia._tests.util.misc import BenchmarkRunner, add_blocks_in_batches, wallet_height_at_least @@ -24,7 +22,7 @@ async def wallet_balance_at_least(wallet_node: WalletNode, balance: uint128) -> @pytest.mark.anyio async def test_mempool_update_performance( wallet_nodes_mempool_perf: OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, benchmark_runner: BenchmarkRunner, ) -> None: diff --git a/chia/_tests/core/mempool/test_singleton_fast_forward.py b/chia/_tests/core/mempool/test_singleton_fast_forward.py index 8fc4eae2691e..f53b759886c9 100644 --- a/chia/_tests/core/mempool/test_singleton_fast_forward.py +++ b/chia/_tests/core/mempool/test_singleton_fast_forward.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -206,7 +206,7 @@ def test_perform_the_fast_forward() -> None: parent_parent_id=test_child_coin.parent_coin_info, ) # Start from a fresh state of fast forward spends - fast_forward_spends: Dict[bytes32, UnspentLineageInfo] = {} + fast_forward_spends: dict[bytes32, UnspentLineageInfo] = {} # Perform the fast forward on the test coin (the grandparent) new_coin_spend, patched_additions = perform_the_fast_forward( test_unspent_lineage_info, test_spend_data, fast_forward_spends @@ -246,14 +246,14 @@ def sign_delegated_puz(del_puz: Program, coin: Coin) -> G2Element: async def make_and_send_spend_bundle( sim: SpendSim, sim_client: SimClient, - coin_spends: List[CoinSpend], + coin_spends: list[CoinSpend], is_eligible_for_ff: bool = True, *, is_launcher_coin: bool = False, signing_puzzle: Optional[Program] = None, signing_coin: Optional[Coin] = None, aggsig: G2Element = G2Element(), -) -> Tuple[MempoolInclusionStatus, Optional[Err]]: +) -> tuple[MempoolInclusionStatus, Optional[Err]]: if is_launcher_coin or not is_eligible_for_ff: assert signing_puzzle is not None assert signing_coin is not None @@ -268,7 +268,7 @@ async def make_and_send_spend_bundle( return status, error -async def get_singleton_and_remaining_coins(sim: SpendSim) -> Tuple[Coin, List[Coin]]: +async def get_singleton_and_remaining_coins(sim: SpendSim) -> tuple[Coin, list[Coin]]: coins = await sim.all_non_reward_coins() singletons = [coin for coin in coins if coin.amount & 1] assert len(singletons) == 1 @@ -281,9 +281,9 @@ def make_singleton_coin_spend( parent_coin_spend: CoinSpend, coin_to_spend: Coin, inner_puzzle: Program, - inner_conditions: List[List[Any]], + inner_conditions: list[list[Any]], is_eve_spend: bool = False, -) -> Tuple[CoinSpend, Program]: +) -> tuple[CoinSpend, Program]: 
    lineage_proof = singleton_top_layer.lineage_proof_for_coinsol(parent_coin_spend)
     delegated_puzzle = Program.to((1, inner_conditions))
     inner_solution = Program.to([[], delegated_puzzle, []])
@@ -300,7 +300,7 @@ def make_singleton_coin_spend(
 
 async def prepare_singleton_eve(
     sim: SpendSim, sim_client: SimClient, is_eligible_for_ff: bool, start_amount: uint64, singleton_amount: uint64
-) -> Tuple[Program, CoinSpend, Program]:
+) -> tuple[Program, CoinSpend, Program]:
     # Generate starting info
     key_lookup = KeyTool()
     pk = G1Element.from_bytes(public_key_for_index(1, key_lookup))
@@ -350,7 +350,7 @@ async def prepare_singleton_eve(
 
 async def prepare_and_test_singleton(
     sim: SpendSim, sim_client: SimClient, is_eligible_for_ff: bool, start_amount: uint64, singleton_amount: uint64
-) -> Tuple[Coin, CoinSpend, Program, Coin]:
+) -> tuple[Coin, CoinSpend, Program, Coin]:
     inner_puzzle, eve_coin_spend, eve_signing_puzzle = await prepare_singleton_eve(
         sim, sim_client, is_eligible_for_ff, start_amount, singleton_amount
     )
@@ -407,7 +407,7 @@ async def test_singleton_fast_forward_different_block(is_eligible_for_ff: bool)
     sk = AugSchemeMPL.key_gen(b"1" * 32)
     g1 = sk.get_g1()
     sig = AugSchemeMPL.sign(sk, b"foobar", g1)
-    inner_conditions: List[List[Any]] = [
+    inner_conditions: list[list[Any]] = [
         [ConditionOpcode.AGG_SIG_UNSAFE, bytes(g1), b"foobar"],
         [ConditionOpcode.CREATE_COIN, inner_puzzle_hash, SINGLETON_CHILD_AMOUNT],
     ]
@@ -501,7 +501,7 @@ async def test_singleton_fast_forward_same_block() -> None:
     sk = AugSchemeMPL.key_gen(b"9" * 32)
     g1 = sk.get_g1()
     sig = AugSchemeMPL.sign(sk, b"foobar", g1)
-    inner_conditions: List[List[Any]] = [
+    inner_conditions: list[list[Any]] = [
         [ConditionOpcode.AGG_SIG_UNSAFE, bytes(g1), b"foobar"],
         [ConditionOpcode.CREATE_COIN, inner_puzzle_hash, SINGLETON_CHILD_AMOUNT],
     ]
diff --git a/chia/_tests/core/server/serve.py b/chia/_tests/core/server/serve.py
index 533c03290aea..4d1627589b70 100644
--- a/chia/_tests/core/server/serve.py
+++ b/chia/_tests/core/server/serve.py
@@ -9,7 +9,7 @@
 import pathlib
 import sys
 import threading
-from typing import List, Optional, final, overload
+from typing import Optional, final, overload
 
 from chia._tests.util.misc import create_logger
 from chia.server.chia_policy import ChiaPolicy
@@ -48,7 +48,7 @@ async def async_main(
     shutdown_path: pathlib.Path,
     ip: str = "127.0.0.1",
     port: int = 8444,
-    port_holder: Optional[List[int]] = None,
+    port_holder: Optional[list[int]] = None,
 ) -> None: ...
 
 
@@ -59,7 +59,7 @@ async def async_main(
     thread_end_event: threading.Event,
     ip: str = "127.0.0.1",
     port: int = 8444,
-    port_holder: Optional[List[int]] = None,
+    port_holder: Optional[list[int]] = None,
 ) -> None: ...
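
A note on the pattern in the serve.py hunks above and below: every touched file keeps `from __future__ import annotations`, and that is what makes the `List` to `list` swap safe in annotation position. A minimal sketch, not part of this patch (`hold_port` is an invented stand-in for the `port_holder` idiom):

```python
# Sketch only: the annotation style these hunks migrate to. With PEP 563
# postponed evaluation (the __future__ import every touched file keeps),
# built-in generics such as list[int] are stored as strings and never
# evaluated at runtime, so this parses and runs even on interpreters that
# predate PEP 585's runtime support in Python 3.9.
from __future__ import annotations

from typing import Optional


def hold_port(port_holder: Optional[list[int]] = None, port: int = 8444) -> None:
    # hold_port is a made-up stand-in for the serve.py pattern: a mutable
    # list the caller passes in and reads the bound port back out of.
    if port_holder is not None:
        port_holder.append(port)


holder: list[int] = []
hold_port(holder)
print(holder)  # [8444]
```

Because PEP 563 stores annotations as plain strings, `list[int]` above is never subscripted at runtime; only type checkers such as mypy ever evaluate it.
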
@@ -70,7 +70,7 @@ async def async_main( thread_end_event: Optional[threading.Event] = None, ip: str = "127.0.0.1", port: int = 8444, - port_holder: Optional[List[int]] = None, + port_holder: Optional[list[int]] = None, ) -> None: with out_path.open(mode="w") as file: logger = create_logger(file=file) diff --git a/chia/_tests/core/server/test_capabilities.py b/chia/_tests/core/server/test_capabilities.py index ac6d00725a75..564cf9867faf 100644 --- a/chia/_tests/core/server/test_capabilities.py +++ b/chia/_tests/core/server/test_capabilities.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import pytest from chia.protocols.shared_protocol import Capability @@ -53,8 +51,8 @@ ids=lambda value: "disabled" if value else "enabled", ) def test_known_active_capabilities_filter( - values: List[Tuple[uint16, str]], - expected: List[Capability], + values: list[tuple[uint16, str]], + expected: list[Capability], duplicated: bool, disabled: bool, ) -> None: diff --git a/chia/_tests/core/server/test_dos.py b/chia/_tests/core/server/test_dos.py index 6003c4cd64f4..3bec21ba1162 100644 --- a/chia/_tests/core/server/test_dos.py +++ b/chia/_tests/core/server/test_dos.py @@ -4,7 +4,6 @@ import asyncio import logging import time -from typing import List, Tuple import pytest from aiohttp import ClientSession, ClientTimeout, WSCloseCode, WSMessage, WSMsgType, WSServerHandshakeError @@ -53,7 +52,7 @@ class TestDos: @pytest.mark.anyio async def test_banned_host_can_not_connect( self, - setup_two_nodes_fixture: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + setup_two_nodes_fixture: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], self_hostname: str, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -77,7 +76,7 @@ async def test_banned_host_can_not_connect( @pytest.mark.anyio async def test_large_message_disconnect_and_ban( self, - setup_two_nodes_fixture: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + setup_two_nodes_fixture: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], self_hostname: str, monkeypatch: pytest.MonkeyPatch, ) -> None: @@ -112,7 +111,7 @@ async def test_large_message_disconnect_and_ban( @pytest.mark.anyio async def test_bad_handshake_and_ban( self, - setup_two_nodes_fixture: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + setup_two_nodes_fixture: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], self_hostname: str, monkeypatch: pytest.MonkeyPatch, ) -> None: diff --git a/chia/_tests/core/server/test_loop.py b/chia/_tests/core/server/test_loop.py index aad8d75f06a9..515b9d401561 100644 --- a/chia/_tests/core/server/test_loop.py +++ b/chia/_tests/core/server/test_loop.py @@ -8,8 +8,9 @@ import subprocess import sys import threading +from collections.abc import AsyncIterator from dataclasses import dataclass, field -from typing import AsyncIterator, List, Optional +from typing import Optional import anyio import pytest @@ -57,8 +58,8 @@ async def open(cls, ip: str, port: int) -> Client: @classmethod @contextlib.asynccontextmanager - async def open_several(cls, count: int, ip: str, port: int) -> AsyncIterator[List[Client]]: - clients: List[Client] = await asyncio.gather(*(cls.open(ip=ip, port=port) for _ in range(count))) + async def open_several(cls, count: int, ip: str, port: int) -> AsyncIterator[list[Client]]: + clients: list[Client] = await 
asyncio.gather(*(cls.open(ip=ip, port=port) for _ in range(count))) try: yield [*clients] finally: @@ -98,7 +99,7 @@ class ServeInThread: server_task: Optional[asyncio.Task[None]] = None thread: Optional[threading.Thread] = None thread_end_event: threading.Event = field(default_factory=threading.Event) - port_holder: List[int] = field(default_factory=list) + port_holder: list[int] = field(default_factory=list) def start(self) -> None: self.original_connection_limit = chia_policy.global_max_concurrent_connections @@ -214,10 +215,10 @@ async def test_loop(tmp_path: pathlib.Path) -> None: over = [] connection_limit = 25 - accept_loop_count_over: List[int] = [] + accept_loop_count_over: list[int] = [] server_output_lines = serve_output.splitlines() found_shutdown = False - shutdown_lines: List[str] = [] + shutdown_lines: list[str] = [] for line in server_output_lines: if not found_shutdown: if not line.casefold().endswith("shutting down"): diff --git a/chia/_tests/core/server/test_node_discovery.py b/chia/_tests/core/server/test_node_discovery.py index 3f718adc20ab..452194e009cb 100644 --- a/chia/_tests/core/server/test_node_discovery.py +++ b/chia/_tests/core/server/test_node_discovery.py @@ -2,7 +2,6 @@ from logging import Logger from pathlib import Path -from typing import Tuple import pytest @@ -15,7 +14,7 @@ @pytest.mark.anyio async def test_enable_private_networks( - two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], + two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], ) -> None: chia_server = two_nodes[2] diff --git a/chia/_tests/core/server/test_rate_limits.py b/chia/_tests/core/server/test_rate_limits.py index 1d761b3d7243..ac1458018d7f 100644 --- a/chia/_tests/core/server/test_rate_limits.py +++ b/chia/_tests/core/server/test_rate_limits.py @@ -1,7 +1,6 @@ from __future__ import annotations import asyncio -from typing import List import pytest @@ -19,7 +18,7 @@ rl_v2 = [Capability.BASE, Capability.BLOCK_HEADERS, Capability.RATE_LIMITS_V2] rl_v1 = [Capability.BASE] node_with_params_b = node_with_params -test_different_versions_results: List[int] = [] +test_different_versions_results: list[int] = [] class TestRateLimits: diff --git a/chia/_tests/core/server/test_server.py b/chia/_tests/core/server/test_server.py index fbfdbc2bfa55..4b87c3fc4a1d 100644 --- a/chia/_tests/core/server/test_server.py +++ b/chia/_tests/core/server/test_server.py @@ -2,7 +2,7 @@ import logging from dataclasses import dataclass -from typing import Callable, Tuple, cast +from typing import Callable, cast import pytest from packaging.version import Version @@ -44,7 +44,7 @@ async def request_transaction(self, request: RequestTransaction) -> None: @pytest.mark.anyio async def test_duplicate_client_connection( - two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str + two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str ) -> None: _, _, server_1, server_2, _ = two_nodes assert await server_2.start_client(PeerInfo(self_hostname, server_1.get_port()), None) @@ -54,7 +54,7 @@ async def test_duplicate_client_connection( @pytest.mark.anyio @pytest.mark.parametrize("method", [repr, str]) async def test_connection_string_conversion( - two_nodes_one_block: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], + two_nodes_one_block: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str, method: Callable[[object], str], ) -> None: 
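
Besides the builtin-generic swap, several hunks in this series (test_mempool_manager.py and test_loop.py above, test_farmer_harvester_rpc.py below) also move `Awaitable`, `Collection`, and `AsyncIterator` imports from `typing` to `collections.abc`. A minimal sketch of the resulting import split, assuming Python 3.9+; `total` is an invented example function:

```python
# Sketch only: the import split this series applies. Container and async
# protocols now come from collections.abc, while typing keeps the names
# that are purely type-system constructs.
from __future__ import annotations

import asyncio
from collections.abc import Awaitable, Collection
from typing import Optional


async def total(jobs: Collection[Awaitable[int]]) -> Optional[int]:
    # Sum the results of a collection of awaitables; None if it is empty.
    if not jobs:
        return None
    return sum(await asyncio.gather(*jobs))


async def main() -> None:
    async def one() -> int:
        return 1

    print(await total([one(), one()]))  # 2


asyncio.run(main())
```

`Optional`, `Union`, and `Any` have no `collections.abc` counterpart, and these diffs keep importing `Callable` from `typing` as well, so those imports stay put.
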
@@ -178,7 +178,7 @@ def error_log_found(connection: WSChiaConnection) -> bool: @pytest.mark.anyio async def test_call_api_of_specific( - two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str + two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str ) -> None: _, _, server_1, server_2, _ = two_nodes assert await server_1.start_client(PeerInfo(self_hostname, server_2.get_port()), None) @@ -193,7 +193,7 @@ async def test_call_api_of_specific( @pytest.mark.anyio async def test_call_api_of_specific_for_missing_peer( - two_nodes: Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] + two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools] ) -> None: _, _, server_1, server_2, _ = two_nodes diff --git a/chia/_tests/core/services/test_services.py b/chia/_tests/core/services/test_services.py index 581076608f59..86187ea7426b 100644 --- a/chia/_tests/core/services/test_services.py +++ b/chia/_tests/core/services/test_services.py @@ -5,7 +5,7 @@ import sys import time from pathlib import Path -from typing import Any, Dict +from typing import Any import aiohttp.client_exceptions import pytest @@ -39,11 +39,11 @@ async def __call__( self_hostname: str, port: uint16, root_path: Path, - net_config: Dict[str, Any], + net_config: dict[str, Any], ) -> RpcClient: ... -async def wait_for_daemon_connection(root_path: Path, config: Dict[str, Any], timeout: float = 15) -> DaemonProxy: +async def wait_for_daemon_connection(root_path: Path, config: dict[str, Any], timeout: float = 15) -> DaemonProxy: timeout = adjusted_timeout(timeout=timeout) start = time.monotonic() diff --git a/chia/_tests/core/ssl/test_ssl.py b/chia/_tests/core/ssl/test_ssl.py index 65423317dff7..b956ed1b83ff 100644 --- a/chia/_tests/core/ssl/test_ssl.py +++ b/chia/_tests/core/ssl/test_ssl.py @@ -151,8 +151,10 @@ async def test_farmer_mismatch_context(self, farmer_one_harvester_not_started, s ssl_context = ssl_context_for_client(ca_private_crt_path, ca_private_key_path, pub_crt, pub_key) caplog.clear() - with pytest.raises(Exception), ignore_ssl_cert_error(), caplog.at_level( - logging.DEBUG, logger="asyncio" + with ( + pytest.raises(Exception), + ignore_ssl_cert_error(), + caplog.at_level(logging.DEBUG, logger="asyncio"), ): await establish_connection(farmer_server, self_hostname, ssl_context) diff --git a/chia/_tests/core/test_cost_calculation.py b/chia/_tests/core/test_cost_calculation.py index 1b7241619c72..a80da5484ed3 100644 --- a/chia/_tests/core/test_cost_calculation.py +++ b/chia/_tests/core/test_cost_calculation.py @@ -2,7 +2,6 @@ import logging import pathlib -from typing import List import pytest from chia_rs import G1Element @@ -293,7 +292,7 @@ async def test_get_puzzle_and_solution_for_coin_performance(benchmark_runner: Be ) coin_spends = result.first() - spent_coins: List[Coin] = [] + spent_coins: list[Coin] = [] for spend in coin_spends.as_iter(): parent, puzzle, amount_program, _ = spend.as_iter() parent_coin_info = parent.as_atom() diff --git a/chia/_tests/core/test_db_conversion.py b/chia/_tests/core/test_db_conversion.py index 8761c3e7e0ab..c79e46a4786f 100644 --- a/chia/_tests/core/test_db_conversion.py +++ b/chia/_tests/core/test_db_conversion.py @@ -2,7 +2,6 @@ import random from pathlib import Path -from typing import List, Tuple import pytest @@ -32,7 +31,7 @@ def rand_bytes(num) -> bytes: async def test_blocks(default_1000_blocks, with_hints: bool): blocks = default_1000_blocks - hints: 
List[Tuple[bytes32, bytes]] = [] + hints: list[tuple[bytes32, bytes]] = [] for i in range(351): hints.append((bytes32(rand_bytes(32)), rand_bytes(20))) diff --git a/chia/_tests/core/test_db_validation.py b/chia/_tests/core/test_db_validation.py index 034d6896a440..71090c550494 100644 --- a/chia/_tests/core/test_db_validation.py +++ b/chia/_tests/core/test_db_validation.py @@ -4,7 +4,6 @@ import sqlite3 from contextlib import closing from pathlib import Path -from typing import List import pytest @@ -128,7 +127,7 @@ def test_db_validate_in_main_chain(invalid_in_chain: bool) -> None: validate_v2(db_file, validate_blocks=False) -async def make_db(db_file: Path, blocks: List[FullBlock]) -> None: +async def make_db(db_file: Path, blocks: list[FullBlock]) -> None: async with DBWrapper2.managed(database=db_file, reader_count=1, db_version=2) as db_wrapper: async with db_wrapper.writer_maybe_transaction() as conn: # this is done by chia init normally @@ -150,7 +149,7 @@ async def make_db(db_file: Path, blocks: List[FullBlock]) -> None: @pytest.mark.anyio -async def test_db_validate_default_1000_blocks(default_1000_blocks: List[FullBlock]) -> None: +async def test_db_validate_default_1000_blocks(default_1000_blocks: list[FullBlock]) -> None: with TempFile() as db_file: await make_db(db_file, default_1000_blocks) diff --git a/chia/_tests/core/test_farmer_harvester_rpc.py b/chia/_tests/core/test_farmer_harvester_rpc.py index 7182f0207495..44481966bc58 100644 --- a/chia/_tests/core/test_farmer_harvester_rpc.py +++ b/chia/_tests/core/test_farmer_harvester_rpc.py @@ -5,11 +5,12 @@ import operator import sys import time +from collections.abc import Awaitable from math import ceil from os import mkdir from pathlib import Path from shutil import copy -from typing import Any, Awaitable, Callable, Dict, List, Union, cast +from typing import Any, Callable, Union, cast import pytest @@ -358,8 +359,8 @@ def test_plot_matches_filter(filter_item: FilterItem, match: bool) -> None: @pytest.mark.skipif(sys.platform == "win32", reason="avoiding crashes on windows until we fix this (crashing workers)") async def test_farmer_get_harvester_plots_endpoints( harvester_farmer_environment: HarvesterFarmerEnvironment, - endpoint: Callable[[FarmerRpcClient, PaginatedRequestData], Awaitable[Dict[str, Any]]], - filtering: Union[List[FilterItem], List[str]], + endpoint: Callable[[FarmerRpcClient, PaginatedRequestData], Awaitable[dict[str, Any]]], + filtering: Union[list[FilterItem], list[str]], sort_key: str, reverse: bool, expected_plot_count: int, @@ -378,12 +379,12 @@ async def test_farmer_get_harvester_plots_endpoints( request: PaginatedRequestData if endpoint == FarmerRpcClient.get_harvester_plots_valid: request = PlotInfoRequestData( - harvester_id, uint32(0), uint32(0), cast(List[FilterItem], filtering), sort_key, reverse + harvester_id, uint32(0), uint32(0), cast(list[FilterItem], filtering), sort_key, reverse ) else: - request = PlotPathRequestData(harvester_id, uint32(0), uint32(0), cast(List[str], filtering), reverse) + request = PlotPathRequestData(harvester_id, uint32(0), uint32(0), cast(list[str], filtering), reverse) - def add_plot_directories(prefix: str, count: int) -> List[Path]: + def add_plot_directories(prefix: str, count: int) -> list[Path]: new_paths = [] for i in range(count): new_paths.append(harvester.root_path / f"{prefix}_{i}") diff --git a/chia/_tests/core/test_filter.py b/chia/_tests/core/test_filter.py index f4fd125d5e38..f776bc4e9861 100644 --- a/chia/_tests/core/test_filter.py +++ 
b/chia/_tests/core/test_filter.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chiabip158 import PyBIP158 @@ -21,7 +19,7 @@ async def test_basic_filter_test(simulator_and_wallet): pool_reward_puzzle_hash=ph, ) for i in range(1, num_blocks): - byte_array_tx: List[bytes] = [] + byte_array_tx: list[bytes] = [] block = blocks[i] coins = block.get_included_reward_coins() coin_0 = bytearray(coins[0].puzzle_hash) diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index 1d0e247aa303..a6a512a0c89b 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -1,8 +1,6 @@ # flake8: noqa: F811, F401 from __future__ import annotations -from typing import List - import pytest from chia_rs import AugSchemeMPL from clvm.casts import int_to_bytes @@ -408,14 +406,14 @@ async def num_connections(): await client.close_connection(connections[0]["node_id"]) await time_out_assert(10, num_connections, 0) - blocks: List[FullBlock] = await client.get_blocks(0, 5) + blocks: list[FullBlock] = await client.get_blocks(0, 5) assert len(blocks) == 5 await full_node_api_1.reorg_from_index_to_new_index(ReorgProtocol(2, 55, bytes([0x2] * 32), None)) - new_blocks_0: List[FullBlock] = await client.get_blocks(0, 5) + new_blocks_0: list[FullBlock] = await client.get_blocks(0, 5) assert len(new_blocks_0) == 7 - new_blocks: List[FullBlock] = await client.get_blocks(0, 5, exclude_reorged=True) + new_blocks: list[FullBlock] = await client.get_blocks(0, 5, exclude_reorged=True) assert len(new_blocks) == 5 assert blocks[0].header_hash == new_blocks[0].header_hash assert blocks[1].header_hash == new_blocks[1].header_hash @@ -620,7 +618,7 @@ async def test_get_blockchain_state(one_wallet_and_one_simulator_services, self_ assert state["space"] == 0 assert state["average_block_time"] is None - blocks: List[FullBlock] = bt.get_consecutive_blocks(num_blocks) + blocks: list[FullBlock] = bt.get_consecutive_blocks(num_blocks) blocks = bt.get_consecutive_blocks(num_blocks, block_list_input=blocks, guarantee_transaction_block=True) for block in blocks: @@ -643,7 +641,7 @@ async def test_get_blockchain_state(one_wallet_and_one_simulator_services, self_ assert state["space"] > 0 assert state["average_block_time"] > 0 - block_records: List[BlockRecord] = [ + block_records: list[BlockRecord] = [ await full_node_api_1.full_node.blockchain.get_block_record_from_db(rec.header_hash) for rec in blocks ] first_non_transaction_block_index = -1 @@ -655,8 +653,8 @@ async def test_get_blockchain_state(one_wallet_and_one_simulator_services, self_ # so first_non_transaction_block_index != 0 assert first_non_transaction_block_index > 0 - transaction_blocks: List[BlockRecord] = [b for b in block_records if b.is_transaction_block] - non_transaction_block: List[BlockRecord] = [b for b in block_records if not b.is_transaction_block] + transaction_blocks: list[BlockRecord] = [b for b in block_records if b.is_transaction_block] + non_transaction_block: list[BlockRecord] = [b for b in block_records if not b.is_transaction_block] assert len(transaction_blocks) > 0 assert len(non_transaction_block) > 0 assert transaction_blocks[0] == await get_nearest_transaction_block( diff --git a/chia/_tests/core/test_merkle_set.py b/chia/_tests/core/test_merkle_set.py index 4dc57744c1cd..6d2fae441b03 100644 --- a/chia/_tests/core/test_merkle_set.py +++ b/chia/_tests/core/test_merkle_set.py @@ -5,7 +5,7 @@ from hashlib import sha256 from itertools 
import permutations from random import Random -from typing import List, Optional, Tuple +from typing import Optional import pytest from chia_rs import Coin, MerkleSet, compute_merkle_set_root, confirm_included_already_hashed @@ -292,7 +292,7 @@ async def test_merkle_set_random_regression() -> None: rng.seed(123456) for i in range(100): size = rng.randint(0, 4000) - values: List[bytes32] = [rand_hash(rng) for _ in range(size)] + values: list[bytes32] = [rand_hash(rng) for _ in range(size)] print(f"iter: {i}/100 size: {size}") for _ in range(10): @@ -304,7 +304,7 @@ async def test_merkle_set_random_regression() -> None: assert rust_root == python_root -def make_test_coins(n: int, rng: Random) -> List[Coin]: +def make_test_coins(n: int, rng: Random) -> list[Coin]: return [Coin(bytes32.random(rng), bytes32.random(rng), uint64(rng.randint(0, 10000000))) for i in range(n)] @@ -314,7 +314,7 @@ def test_validate_removals_full_list(num_coins: int, seeded_random: Random) -> N # the root can be computed by all the removals coins = make_test_coins(num_coins, seeded_random) - coin_map: List[Tuple[bytes32, Optional[Coin]]] = [] + coin_map: list[tuple[bytes32, Optional[Coin]]] = [] removals_merkle_set = MerkleSet([coin.name() for coin in coins]) for coin in coins: coin_map.append((coin.name(), coin)) @@ -330,8 +330,8 @@ def test_validate_additions_full_list(num_coins: int, batch_size: int, seeded_ra # the root can be computed by all the removals coins = make_test_coins(num_coins, seeded_random) - additions: List[Tuple[bytes32, List[Coin]]] = [] - leafs: List[bytes32] = [] + additions: list[tuple[bytes32, list[Coin]]] = [] + leafs: list[bytes32] = [] for coin_batch in to_batches(coins, batch_size): puzzle_hash = bytes32.random(seeded_random) additions.append((puzzle_hash, coin_batch.entries)) diff --git a/chia/_tests/core/test_program.py b/chia/_tests/core/test_program.py index cea5e494ec10..b24989daef04 100644 --- a/chia/_tests/core/test_program.py +++ b/chia/_tests/core/test_program.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - from clvm.SExp import CastableType from clvm_tools import binutils @@ -26,7 +24,7 @@ def program_roundtrip(o: CastableType) -> None: def test_serialized_program_to() -> None: prg = "(q ((0x0101010101010101010101010101010101010101010101010101010101010101 80 123 (() (q . 
())))))" # noqa - tests: List[CastableType] = [ + tests: list[CastableType] = [ 0, 1, (1, 2), diff --git a/chia/_tests/core/test_rpc_util.py b/chia/_tests/core/test_rpc_util.py index 8fad668388cc..04a503caccee 100644 --- a/chia/_tests/core/test_rpc_util.py +++ b/chia/_tests/core/test_rpc_util.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List import pytest @@ -29,7 +28,7 @@ class TestRequestType(Streamable): @streamable @dataclass(frozen=True) class TestResponseObject(Streamable): - qat: List[str] + qat: list[str] sub: SubObject diff --git a/chia/_tests/core/test_seeder.py b/chia/_tests/core/test_seeder.py index a74d4a709e59..9d4250273698 100644 --- a/chia/_tests/core/test_seeder.py +++ b/chia/_tests/core/test_seeder.py @@ -4,7 +4,7 @@ from dataclasses import dataclass from ipaddress import IPv4Address, IPv6Address from socket import AF_INET, AF_INET6, SOCK_STREAM -from typing import Dict, List, Tuple, cast +from typing import cast import dns import pytest @@ -17,7 +17,7 @@ timeout = 0.5 -def generate_test_combs() -> List[Tuple[bool, str, dns.rdatatype.RdataType]]: +def generate_test_combs() -> list[tuple[bool, str, dns.rdatatype.RdataType]]: """ Generates all the combinations of tests we want to run. """ @@ -60,7 +60,7 @@ def __getattr__(self, item: object) -> None: @pytest.fixture(scope="module") -def database_peers() -> Dict[str, PeerReliability]: +def database_peers() -> dict[str, PeerReliability]: """ We override the values in the class with these dbs, to save time. """ @@ -98,7 +98,7 @@ async def make_dns_query( return await dns.asyncquery.udp(q=dns_message, where=target_address, timeout=d_timeout, port=port) -def get_addresses(num_subnets: int = 10) -> Tuple[List[IPv4Address], List[IPv6Address]]: +def get_addresses(num_subnets: int = 10) -> tuple[list[IPv4Address], list[IPv6Address]]: ipv4 = [] ipv6 = [] # generate 2500 ipv4 and 2500 ipv6 peers, it's just a string so who cares @@ -110,7 +110,7 @@ def get_addresses(num_subnets: int = 10) -> Tuple[List[IPv4Address], List[IPv6Ad def assert_standard_results( - std_query_answer: List[dns.rrset.RRset], request_type: dns.rdatatype.RdataType, num_ns: int + std_query_answer: list[dns.rrset.RRset], request_type: dns.rdatatype.RdataType, num_ns: int ) -> None: if request_type == dns.rdatatype.A: assert len(std_query_answer) == 1 # only 1 kind of answer @@ -278,7 +278,7 @@ async def test_dns_queries( @pytest.mark.parametrize("use_tcp, target_address, request_type", all_test_combinations) async def test_db_processing( seeder_service: DNSServer, - database_peers: Dict[str, PeerReliability], + database_peers: dict[str, PeerReliability], use_tcp: bool, target_address: str, request_type: dns.rdatatype.RdataType, diff --git a/chia/_tests/core/util/test_config.py b/chia/_tests/core/util/test_config.py index f8c90dd089bc..a532703ea946 100644 --- a/chia/_tests/core/util/test_config.py +++ b/chia/_tests/core/util/test_config.py @@ -10,7 +10,7 @@ from pathlib import Path from threading import Thread from time import sleep -from typing import Any, Dict, Optional +from typing import Any, Optional import pytest import yaml @@ -37,7 +37,7 @@ def write_config( root_path: Path, - config: Dict, + config: dict, atomic_write: bool, do_sleep: bool, iterations: int, @@ -74,7 +74,7 @@ def write_config( def read_and_compare_config( - root_path: Path, default_config: Dict, do_sleep: bool, iterations: int, error_queue: Optional[Queue] = None + root_path: Path, default_config: dict, do_sleep: bool, iterations: 
int, error_queue: Optional[Queue] = None ): """ Wait for a random amount of time, read the config and compare with the @@ -99,7 +99,7 @@ def read_and_compare_config( raise -async def create_reader_and_writer_tasks(root_path: Path, default_config: Dict): +async def create_reader_and_writer_tasks(root_path: Path, default_config: dict): """ Spin-off reader and writer threads and wait for completion """ @@ -135,7 +135,7 @@ async def create_reader_and_writer_tasks(root_path: Path, default_config: Dict): raise error_queue.get() -def run_reader_and_writer_tasks(root_path: Path, default_config: Dict): +def run_reader_and_writer_tasks(root_path: Path, default_config: dict): """ Subprocess entry point. This function spins-off threads to perform read/write tasks concurrently, possibly leading to synchronization issues accessing config data. @@ -144,12 +144,12 @@ def run_reader_and_writer_tasks(root_path: Path, default_config: Dict): @pytest.fixture(scope="function") -def default_config_dict() -> Dict: +def default_config_dict() -> dict: """ Returns a dictionary containing the default config.yaml contents """ content: str = initial_config_file("config.yaml") - config: Dict = yaml.safe_load(content) + config: dict = yaml.safe_load(content) return config @@ -208,7 +208,7 @@ def test_load_config(self, root_path_populated_with_config, default_config_dict) """ root_path: Path = root_path_populated_with_config # When: loading a newly created config - config: Dict = load_config(root_path=root_path, filename="config.yaml") + config: dict = load_config(root_path=root_path, filename="config.yaml") assert config is not None # Expect: config values should match the defaults (from a small sampling) assert config["daemon_port"] == default_config_dict["daemon_port"] == 55400 @@ -242,7 +242,7 @@ def test_save_config(self, root_path_populated_with_config, default_config_dict) calling load_config(). """ root_path: Path = root_path_populated_with_config - config: Dict = copy.deepcopy(default_config_dict) + config: dict = copy.deepcopy(default_config_dict) # When: modifying the config config["harvester"]["farmer_peers"][0]["host"] = "oldmacdonald.eie.io" # Sanity check that we didn't modify the default config @@ -255,7 +255,7 @@ def test_save_config(self, root_path_populated_with_config, default_config_dict) save_config(root_path=root_path, filename="config.yaml", config_data=config) # Expect: modifications should be preserved in the config read from disk - loaded: Dict = load_config(root_path=root_path, filename="config.yaml") + loaded: dict = load_config(root_path=root_path, filename="config.yaml") assert loaded["harvester"]["farmer_peers"][0]["host"] == "oldmacdonald.eie.io" def test_multiple_writers(self, root_path_populated_with_config, default_config_dict): @@ -310,7 +310,7 @@ async def test_non_atomic_writes(self, root_path_populated_with_config, default_ await asyncio.gather(*all_tasks) @pytest.mark.parametrize("prefix", [None]) - def test_selected_network_address_prefix_default_config(self, config_with_address_prefix: Dict[str, Any]) -> None: + def test_selected_network_address_prefix_default_config(self, config_with_address_prefix: dict[str, Any]) -> None: """ Temp config.yaml created using a default config. 
address_prefix is defaulted to "xch" """ @@ -319,7 +319,7 @@ def test_selected_network_address_prefix_default_config(self, config_with_addres assert prefix == "xch" @pytest.mark.parametrize("prefix", ["txch"]) - def test_selected_network_address_prefix_testnet_config(self, config_with_address_prefix: Dict[str, Any]) -> None: + def test_selected_network_address_prefix_testnet_config(self, config_with_address_prefix: dict[str, Any]) -> None: """ Temp config.yaml created using a modified config. address_prefix is set to "txch" """ @@ -327,7 +327,7 @@ def test_selected_network_address_prefix_testnet_config(self, config_with_addres prefix = selected_network_address_prefix(config) assert prefix == "txch" - def test_selected_network_address_prefix_config_dict(self, default_config_dict: Dict[str, Any]) -> None: + def test_selected_network_address_prefix_config_dict(self, default_config_dict: dict[str, Any]) -> None: """ Modified config dictionary has address_prefix set to "customxch" """ diff --git a/chia/_tests/core/util/test_jsonify.py b/chia/_tests/core/util/test_jsonify.py index 713bd1dfef27..e6920fcefbde 100644 --- a/chia/_tests/core/util/test_jsonify.py +++ b/chia/_tests/core/util/test_jsonify.py @@ -1,14 +1,14 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint32 from chia.util.streamable import Streamable, recurse_jsonify, streamable -def dict_with_types(d: Dict[str, Any]) -> Dict[str, Any]: +def dict_with_types(d: dict[str, Any]) -> dict[str, Any]: return {k: (v, type(v)) for k, v in d.items()} @@ -64,7 +64,7 @@ def test_list() -> None: @streamable @dataclass(frozen=True) class ListTest(Streamable): - d: List[str] + d: list[str] t = ListTest(["foo", "bar"]) @@ -75,7 +75,7 @@ def test_tuple() -> None: @streamable @dataclass(frozen=True) class TupleTest(Streamable): - d: Tuple[str, uint32, str] + d: tuple[str, uint32, str] t = TupleTest(("foo", uint32(123), "bar")) @@ -85,14 +85,14 @@ class TupleTest(Streamable): @streamable @dataclass(frozen=True) class NestedWithTupleInner(Streamable): - a: Tuple[str, uint32, str] + a: tuple[str, uint32, str] b: bytes @streamable @dataclass(frozen=True) class NestedWithTupleOuter(Streamable): - a: Tuple[NestedWithTupleInner, uint32, str] + a: tuple[NestedWithTupleInner, uint32, str] def test_nested_with_tuple() -> None: @@ -113,7 +113,7 @@ class NestedWithListInner(Streamable): @streamable @dataclass(frozen=True) class NestedWithListOuter(Streamable): - a: List[NestedWithListInner] + a: list[NestedWithListInner] def test_nested_with_list() -> None: @@ -125,7 +125,7 @@ def test_nested_with_list() -> None: @streamable @dataclass(frozen=True) class TestNestedInner(Streamable): - a: Tuple[str, uint32, str] + a: tuple[str, uint32, str] b: bytes diff --git a/chia/_tests/core/util/test_keychain.py b/chia/_tests/core/util/test_keychain.py index 3f1f45f1ba30..ba21e90eedcc 100644 --- a/chia/_tests/core/util/test_keychain.py +++ b/chia/_tests/core/util/test_keychain.py @@ -3,7 +3,7 @@ import json import random from dataclasses import dataclass, replace -from typing import Callable, List, Optional, Tuple +from typing import Callable, Optional import importlib_resources import pytest @@ -318,7 +318,7 @@ def test_key_data_without_secrets(key_info: KeyInfo) -> None: ((_24keyinfo.mnemonic.split(), _24keyinfo.entropy, KeyDataSecrets.generate().private_key), "private_key"), ], 
) -def test_key_data_secrets_post_init(input_data: Tuple[List[str], bytes, PrivateKey], data_type: str) -> None: +def test_key_data_secrets_post_init(input_data: tuple[list[str], bytes, PrivateKey], data_type: str) -> None: with pytest.raises(KeychainKeyDataMismatch, match=data_type): KeyDataSecrets(*input_data) @@ -339,7 +339,7 @@ def test_key_data_secrets_post_init(input_data: Tuple[List[str], bytes, PrivateK ], ) def test_key_data_post_init( - input_data: Tuple[uint32, G1Element, Optional[str], Optional[KeyDataSecrets]], data_type: str + input_data: tuple[uint32, G1Element, Optional[str], Optional[KeyDataSecrets]], data_type: str ) -> None: with pytest.raises(KeychainKeyDataMismatch, match=data_type): KeyData(*input_data) diff --git a/chia/_tests/core/util/test_keyring_wrapper.py b/chia/_tests/core/util/test_keyring_wrapper.py index 3c4fa3cb79b8..49f8aefaa12f 100644 --- a/chia/_tests/core/util/test_keyring_wrapper.py +++ b/chia/_tests/core/util/test_keyring_wrapper.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from typing import Type import pytest @@ -457,7 +456,7 @@ def test_set_special_labels(self, label: str, empty_temp_file_keyring: TempKeyri ], ) def test_set_label_failures( - self, label: str, exception: Type[KeychainLabelError], message: str, empty_temp_file_keyring: TempKeyring + self, label: str, exception: type[KeychainLabelError], message: str, empty_temp_file_keyring: TempKeyring ) -> None: keyring_wrapper = KeyringWrapper.get_shared_instance() keyring_wrapper.keyring.set_label(1, "one") diff --git a/chia/_tests/core/util/test_log_exceptions.py b/chia/_tests/core/util/test_log_exceptions.py index 846218d5ddba..bf2bb2a874bc 100644 --- a/chia/_tests/core/util/test_log_exceptions.py +++ b/chia/_tests/core/util/test_log_exceptions.py @@ -4,7 +4,7 @@ import dataclasses import logging import re -from typing import Tuple, Type, Union +from typing import Union import pytest @@ -21,8 +21,8 @@ def logger_fixture() -> logging.Logger: @dataclasses.dataclass class ErrorCase: - type_to_raise: Type[BaseException] - type_to_catch: Union[Type[BaseException], Tuple[Type[BaseException], ...]] + type_to_raise: type[BaseException] + type_to_catch: Union[type[BaseException], tuple[type[BaseException], ...]] should_match: bool diff --git a/chia/_tests/core/util/test_streamable.py b/chia/_tests/core/util/test_streamable.py index e16cd33372d4..52d416ba6e1c 100644 --- a/chia/_tests/core/util/test_streamable.py +++ b/chia/_tests/core/util/test_streamable.py @@ -3,7 +3,7 @@ import io import re from dataclasses import dataclass, field, fields -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, get_type_hints +from typing import Any, Callable, Optional, get_type_hints import pytest from chia_rs import G1Element @@ -69,7 +69,7 @@ def test_dict_not_suppported() -> None: @streamable @dataclass(frozen=True) class TestClassDict(Streamable): - a: Dict[str, str] + a: dict[str, str] @dataclass(frozen=True) @@ -118,8 +118,8 @@ class StreamableFromDict2(Streamable): @streamable @dataclass(frozen=True) class ConvertTupleFailures(Streamable): - a: Tuple[uint8, uint8] - b: Tuple[uint8, Tuple[uint8, uint8]] + a: tuple[uint8, uint8] + b: tuple[uint8, tuple[uint8, uint8]] @pytest.mark.parametrize( @@ -137,7 +137,7 @@ class ConvertTupleFailures(Streamable): pytest.param({"a": (1, 1), "b": (1, "22")}, InvalidTypeError, id="b: invalid type str"), ], ) -def test_convert_tuple_failures(input_dict: Dict[str, Any], error: Any) -> None: +def test_convert_tuple_failures(input_dict: 
dict[str, Any], error: Any) -> None: with pytest.raises(error): streamable_from_dict(ConvertTupleFailures, input_dict) @@ -145,8 +145,8 @@ def test_convert_tuple_failures(input_dict: Dict[str, Any], error: Any) -> None: @streamable @dataclass(frozen=True) class ConvertListFailures(Streamable): - a: List[uint8] - b: List[List[uint8]] + a: list[uint8] + b: list[list[uint8]] @pytest.mark.parametrize( @@ -160,7 +160,7 @@ class ConvertListFailures(Streamable): pytest.param({"a": [1, 1], "b": [1, "22"]}, InvalidTypeError, id="b: invalid type str"), ], ) -def test_convert_list_failures(input_dict: Dict[str, Any], error: Any) -> None: +def test_convert_list_failures(input_dict: dict[str, Any], error: Any) -> None: with pytest.raises(error): streamable_from_dict(ConvertListFailures, input_dict) @@ -189,7 +189,7 @@ class ConvertByteTypeFailures(Streamable): pytest.param({"a": "00000000", "b": "invalid"}, ConversionError, id="b: invalid hex string"), ], ) -def test_convert_byte_type_failures(input_dict: Dict[str, Any], error: Any) -> None: +def test_convert_byte_type_failures(input_dict: dict[str, Any], error: Any) -> None: with pytest.raises(error): streamable_from_dict(ConvertByteTypeFailures, input_dict) @@ -214,7 +214,7 @@ class ConvertUnhashableTypeFailures(Streamable): pytest.param({"a": b"\00" * G1Element.SIZE}, ValueError, "BLS Error G1InfinityInvalidBits"), ], ) -def test_convert_unhashable_type_failures(input_dict: Dict[str, Any], error: Any, error_msg: str) -> None: +def test_convert_unhashable_type_failures(input_dict: dict[str, Any], error: Any, error_msg: str) -> None: with pytest.raises(error, match=re.escape(error_msg)): streamable_from_dict(ConvertUnhashableTypeFailures, input_dict) @@ -240,7 +240,7 @@ class ConvertPrimitiveFailures(Streamable): pytest.param({"a": 0, "b": uint8(1), "c": NoStrClass()}, ConversionError, id="c: invalid value"), ], ) -def test_convert_primitive_failures(input_dict: Dict[str, Any], error: Any) -> None: +def test_convert_primitive_failures(input_dict: dict[str, Any], error: Any) -> None: with pytest.raises(error): streamable_from_dict(ConvertPrimitiveFailures, input_dict) @@ -309,7 +309,7 @@ def test_convert_primitive_failures(input_dict: Dict[str, Any], error: Any) -> N ], ) def test_streamable_from_dict_failures( - test_class: Type[Streamable], input_dict: Dict[str, Any], error: Any, error_message: str + test_class: type[Streamable], input_dict: dict[str, Any], error: Any, error_message: str ) -> None: with pytest.raises(error, match=re.escape(error_message)): streamable_from_dict(test_class, input_dict) @@ -320,7 +320,7 @@ def test_streamable_from_dict_failures( class TestFromJsonDictDefaultValues(Streamable): a: uint64 = uint64(1) b: str = "default" - c: List[uint64] = field(default_factory=list) + c: list[uint64] = field(default_factory=list) @pytest.mark.parametrize( @@ -333,30 +333,30 @@ class TestFromJsonDictDefaultValues(Streamable): [{"a": 2, "b": "not_default", "c": [1, 2]}, {"a": 2, "b": "not_default", "c": [1, 2]}], ], ) -def test_from_json_dict_default_values(input_dict: Dict[str, object], output_dict: Dict[str, object]) -> None: +def test_from_json_dict_default_values(input_dict: dict[str, object], output_dict: dict[str, object]) -> None: assert str(TestFromJsonDictDefaultValues.from_json_dict(input_dict).to_json_dict()) == str(output_dict) def test_basic_list() -> None: a = [1, 2, 3] assert is_type_List(type(a)) - assert is_type_List(List) - assert is_type_List(List[int]) - assert is_type_List(List[uint8]) assert is_type_List(list) - assert 
not is_type_List(type(Tuple)) + assert is_type_List(list[int]) + assert is_type_List(list[uint8]) + assert is_type_List(list) + assert not is_type_List(type(tuple)) assert not is_type_List(tuple) assert not is_type_List(dict) def test_not_lists() -> None: - assert not is_type_List(Dict) + assert not is_type_List(dict) def test_basic_optional() -> None: assert is_type_SpecificOptional(Optional[int]) assert is_type_SpecificOptional(Optional[Optional[int]]) - assert not is_type_SpecificOptional(List[int]) + assert not is_type_SpecificOptional(list[int]) @streamable @@ -388,15 +388,15 @@ class PostInitTestClassOptional(Streamable): @streamable @dataclass(frozen=True) class PostInitTestClassList(Streamable): - a: List[uint8] - b: List[List[G1Element]] + a: list[uint8] + b: list[list[G1Element]] @streamable @dataclass(frozen=True) class PostInitTestClassTuple(Streamable): - a: Tuple[uint8, str] - b: Tuple[Tuple[uint8, str], bytes32] + a: tuple[uint8, str] + b: tuple[tuple[uint8, str], bytes32] @pytest.mark.parametrize( @@ -410,8 +410,8 @@ class PostInitTestClassTuple(Streamable): (PostInitTestClassOptional, (12, None, 13, None)), ], ) -def test_post_init_valid(test_class: Type[Any], args: Tuple[Any, ...]) -> None: - def validate_item_type(type_in: Type[Any], item: object) -> bool: +def test_post_init_valid(test_class: type[Any], args: tuple[Any, ...]) -> None: + def validate_item_type(type_in: type[Any], item: object) -> bool: if is_type_SpecificOptional(type_in): return item is None or validate_item_type(get_args(type_in)[0], item) if is_type_Tuple(type_in): @@ -449,7 +449,7 @@ def validate_item_type(type_in: Type[Any], item: object) -> bool: (PostInitTestClassOptional, ([], None, None, None), ValueError), ], ) -def test_post_init_failures(test_class: Type[Any], args: Tuple[Any, ...], expected_exception: Type[Exception]) -> None: +def test_post_init_failures(test_class: type[Any], args: tuple[Any, ...], expected_exception: type[Exception]) -> None: with pytest.raises(expected_exception): test_class(*args) @@ -460,11 +460,11 @@ def test_basic() -> None: class TestClass(Streamable): a: uint32 b: uint32 - c: List[uint32] - d: List[List[uint32]] + c: list[uint32] + d: list[list[uint32]] e: Optional[uint32] f: Optional[uint32] - g: Tuple[uint32, str, bytes] + g: tuple[uint32, str, bytes] # we want to test invalid here, hence the ignore. 
a = TestClass(24, 352, [1, 2, 4], [[1, 2, 3], [3, 4]], 728, None, (383, "hello", b"goodbye")) # type: ignore[arg-type,list-item] # noqa: E501 @@ -503,7 +503,7 @@ def test_json(bt: BlockTools) -> None: class OptionalTestClass(Streamable): a: Optional[str] b: Optional[bool] - c: Optional[List[Optional[str]]] + c: Optional[list[Optional[str]]] @pytest.mark.parametrize( @@ -517,7 +517,7 @@ class OptionalTestClass(Streamable): (None, None, None), ], ) -def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Optional[str]]]) -> None: +def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[list[Optional[str]]]) -> None: obj: OptionalTestClass = OptionalTestClass.from_json_dict({"a": a, "b": b, "c": c}) assert obj.a == a assert obj.b == b @@ -527,14 +527,14 @@ def test_optional_json(a: Optional[str], b: Optional[bool], c: Optional[List[Opt @streamable @dataclass(frozen=True) class TestClassRecursive1(Streamable): - a: List[uint32] + a: list[uint32] @streamable @dataclass(frozen=True) class TestClassRecursive2(Streamable): a: uint32 - b: List[Optional[List[TestClassRecursive1]]] + b: list[Optional[list[TestClassRecursive1]]] c: bytes32 @@ -586,7 +586,7 @@ def test_ambiguous_deserialization_list() -> None: @streamable @dataclass(frozen=True) class TestClassList(Streamable): - a: List[uint8] + a: list[uint8] # Does not have the required elements with pytest.raises(ValueError): @@ -597,7 +597,7 @@ def test_ambiguous_deserialization_tuple() -> None: @streamable @dataclass(frozen=True) class TestClassTuple(Streamable): - a: Tuple[uint8, str] + a: tuple[uint8, str] # Does not have the required elements with pytest.raises(AssertionError): diff --git a/chia/_tests/db/test_db_wrapper.py b/chia/_tests/db/test_db_wrapper.py index 3186136bb7a4..88df3dea71e9 100644 --- a/chia/_tests/db/test_db_wrapper.py +++ b/chia/_tests/db/test_db_wrapper.py @@ -3,7 +3,7 @@ import asyncio import contextlib from dataclasses import dataclass -from typing import TYPE_CHECKING, Callable, List, Optional, Type +from typing import TYPE_CHECKING, Callable, Optional import aiosqlite import pytest @@ -54,7 +54,7 @@ async def decrement_counter(db_wrapper: DBWrapper2) -> None: await connection.execute("UPDATE counter SET value = :value", {"value": new_value}) -async def sum_counter(db_wrapper: DBWrapper2, output: List[int]) -> None: +async def sum_counter(db_wrapper: DBWrapper2, output: list[int]) -> None: async with db_wrapper.reader_no_transaction() as connection: async with connection.execute("SELECT value FROM counter") as cursor: row = await cursor.fetchone() @@ -340,7 +340,7 @@ async def test_concurrent_readers(acquire_outside: bool, get_reader_method: GetR await exit_stack.enter_async_context(get_reader_method(db_wrapper)()) tasks = [] - values: List[int] = [] + values: list[int] = [] for index in range(concurrent_task_count): task = asyncio.create_task(sum_counter(db_wrapper, values)) tasks.append(task) @@ -369,7 +369,7 @@ async def test_mixed_readers_writers(acquire_outside: bool, get_reader_method: G await exit_stack.enter_async_context(get_reader_method(db_wrapper)()) tasks = [] - values: List[int] = [] + values: list[int] = [] for index in range(concurrent_task_count): task = asyncio.create_task(increment_counter(db_wrapper)) tasks.append(task) @@ -495,11 +495,11 @@ async def test_foreign_key_pragma_rolls_back_on_foreign_key_error() -> None: @dataclass class RowFactoryCase: id: str - factory: Optional[Type[aiosqlite.Row]] + factory: Optional[type[aiosqlite.Row]] marks: Marks = () 
-row_factory_cases: List[RowFactoryCase] = [
+row_factory_cases: list[RowFactoryCase] = [
     RowFactoryCase(id="default named tuple", factory=None),
     RowFactoryCase(id="aiosqlite row", factory=aiosqlite.Row),
 ]
diff --git a/chia/_tests/environments/common.py b/chia/_tests/environments/common.py
index a74b8a7c812c..dec72951d101 100644
--- a/chia/_tests/environments/common.py
+++ b/chia/_tests/environments/common.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import ClassVar, Protocol, Tuple, TypeVar
+from typing import ClassVar, Protocol, TypeVar

 from chia.rpc.rpc_server import RpcApiProtocol, RpcServer, RpcServiceProtocol
 from chia.server.api_protocol import ApiProtocol
@@ -17,7 +17,7 @@ class ServiceEnvironment(Protocol[T_Node, T_RpcApi, T_PeerApi]):

     service: Service[T_Node, T_PeerApi, T_RpcApi]

-    __match_args__: ClassVar[Tuple[str, ...]] = ()
+    __match_args__: ClassVar[tuple[str, ...]] = ()

     @property
     def node(self) -> T_Node: ...
diff --git a/chia/_tests/environments/full_node.py b/chia/_tests/environments/full_node.py
index 09a6960bd111..7c20ee977499 100644
--- a/chia/_tests/environments/full_node.py
+++ b/chia/_tests/environments/full_node.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 from dataclasses import dataclass
-from typing import TYPE_CHECKING, ClassVar, Tuple, cast
+from typing import TYPE_CHECKING, ClassVar, cast

 from chia._tests.environments.common import ServiceEnvironment
 from chia.full_node.full_node import FullNode
@@ -20,7 +20,7 @@ class FullNodeEnvironment:
             None,
         )

-    __match_args__: ClassVar[Tuple[str, ...]] = ()
+    __match_args__: ClassVar[tuple[str, ...]] = ()

     service: Service[FullNode, FullNodeSimulator, SimulatorFullNodeRpcApi]
diff --git a/chia/_tests/environments/wallet.py b/chia/_tests/environments/wallet.py
index ed9092b311a0..1c8eefcd713d 100644
--- a/chia/_tests/environments/wallet.py
+++ b/chia/_tests/environments/wallet.py
@@ -3,7 +3,7 @@
 import json
 import operator
 from dataclasses import asdict, dataclass, field
-from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Tuple, Union, cast
+from typing import TYPE_CHECKING, ClassVar, Optional, Union, cast

 from chia._tests.environments.common import ServiceEnvironment
 from chia.rpc.full_node_rpc_client import FullNodeRpcClient
@@ -28,9 +28,9 @@ class BalanceCheckingError(Exception):
-    errors: Dict[Union[int, str], List[str]]
+    errors: dict[Union[int, str], list[str]]

-    def __init__(self, errors: Dict[Union[int, str], List[str]]) -> None:
+    def __init__(self, errors: dict[Union[int, str], list[str]]) -> None:
         self.errors = errors

     def __repr__(self) -> str:
@@ -47,10 +47,10 @@ class WalletState:

 @dataclass
 class WalletStateTransition:
-    pre_block_balance_updates: Dict[Union[int, str], Dict[str, int]] = field(default_factory=dict)
-    post_block_balance_updates: Dict[Union[int, str], Dict[str, int]] = field(default_factory=dict)
-    pre_block_additional_balance_info: Dict[Union[int, str], Dict[str, int]] = field(default_factory=dict)
-    post_block_additional_balance_info: Dict[Union[int, str], Dict[str, int]] = field(default_factory=dict)
+    pre_block_balance_updates: dict[Union[int, str], dict[str, int]] = field(default_factory=dict)
+    post_block_balance_updates: dict[Union[int, str], dict[str, int]] = field(default_factory=dict)
+    pre_block_additional_balance_info: dict[Union[int, str], dict[str, int]] = field(default_factory=dict)
+    post_block_additional_balance_info: dict[Union[int, str], dict[str, int]] = field(default_factory=dict)


 @dataclass
@@ -60,14 +60,14 @@ class WalletEnvironment:
         "WalletEnvironment", None
     )

-    __match_args__: ClassVar[Tuple[str, ...]] = ()
+    __match_args__: ClassVar[tuple[str, ...]] = ()

     service: Service[WalletNode, WalletNodeAPI, WalletRpcApi]
     # TODO: maybe put this in the protocol?
     rpc_client: WalletRpcClient
     # TODO: added the default, but should think through implementing it etc. `.create()`?
-    wallet_states: Dict[uint32, WalletState] = field(default_factory=dict)
-    wallet_aliases: Dict[str, int] = field(default_factory=dict)
+    wallet_states: dict[uint32, WalletState] = field(default_factory=dict)
+    wallet_aliases: dict[str, int] = field(default_factory=dict)

     @property
     def node(self) -> WalletNode:
@@ -113,31 +113,31 @@ def alias_wallet_id(self, wallet_id: uint32) -> Union[uint32, str]:
         """
         This function turns a wallet id into an alias if one is available or the same wallet id if one is not.
         """
-        inverted_wallet_aliases: Dict[int, str] = {v: k for k, v in self.wallet_aliases.items()}
+        inverted_wallet_aliases: dict[int, str] = {v: k for k, v in self.wallet_aliases.items()}
         if wallet_id in inverted_wallet_aliases:
             return inverted_wallet_aliases[wallet_id]
         else:
             return wallet_id

-    async def check_balances(self, additional_balance_info: Dict[Union[int, str], Dict[str, int]] = {}) -> None:
+    async def check_balances(self, additional_balance_info: dict[Union[int, str], dict[str, int]] = {}) -> None:
         """
         This function checks the internal representation of what the balances should be against the balances that the
         wallet actually returns via the RPC.

         Likely this should be called as part of WalletTestFramework.process_pending_states instead of directly.
         """
-        dealiased_additional_balance_info: Dict[uint32, Dict[str, int]] = {
+        dealiased_additional_balance_info: dict[uint32, dict[str, int]] = {
             self.dealias_wallet_id(k): v for k, v in additional_balance_info.items()
         }
-        errors: Dict[Union[int, str], List[str]] = {}
+        errors: dict[Union[int, str], list[str]] = {}
         for wallet_id in self.wallet_state_manager.wallets:
             if wallet_id not in self.wallet_states:
                 raise KeyError(f"No wallet state for wallet id {wallet_id} (alias: {self.alias_wallet_id(wallet_id)})")
             wallet_state: WalletState = self.wallet_states[wallet_id]
-            wallet_errors: List[str] = []
+            wallet_errors: list[str] = []

             assert self.node.logged_in_fingerprint is not None
-            expected_result: Dict[str, int] = {
+            expected_result: dict[str, int] = {
                 **wallet_state.balance.to_json_dict(),
                 "wallet_id": wallet_id,
                 "wallet_type": self.wallet_state_manager.wallets[wallet_id].type().value,
@@ -148,7 +148,7 @@ async def check_balances(self, additional_balance_info: Dict[Union[int, str], Di
                     else {}
                 ),
             }
-            balance_response: Dict[str, int] = await self.rpc_client.get_wallet_balance(wallet_id)
+            balance_response: dict[str, int] = await self.rpc_client.get_wallet_balance(wallet_id)

             if not expected_result.items() <= balance_response.items():
                 for key, value in expected_result.items():
@@ -165,7 +165,7 @@ async def check_balances(self, additional_balance_info: Dict[Union[int, str], Di
         if errors != {}:
             raise BalanceCheckingError(errors)

-    async def change_balances(self, update_dictionary: Dict[Union[int, str], Dict[str, int]]) -> None:
+    async def change_balances(self, update_dictionary: dict[Union[int, str], dict[str, int]]) -> None:
         """
         This method changes the internal representation of what the wallet balances should be.
         This is probably necessary to call before check_balances as most wallet operations will result in a balance change that causes
@@ -193,7 +193,7 @@ async def change_balances(self, update_dictionary: Dict[Union[int, str], Dict[st
         for wallet_id_or_alias, kwargs in update_dictionary.items():
             wallet_id: uint32 = self.dealias_wallet_id(wallet_id_or_alias)

-            new_values: Dict[str, int] = {}
+            new_values: dict[str, int] = {}
             existing_values: Balance = await self.node.get_balance(wallet_id)
             if "init" in kwargs and kwargs["init"]:
                 new_values = {k: v for k, v in kwargs.items() if k not in ("set_remainder", "init")}
@@ -247,10 +247,10 @@ async def change_balances(self, update_dictionary: Dict[Union[int, str], Dict[st
         }

     async def wait_for_transactions_to_settle(
-        self, full_node_api: FullNodeSimulator, _exclude_from_mempool_check: List[bytes32] = []
-    ) -> List[TransactionRecord]:
+        self, full_node_api: FullNodeSimulator, _exclude_from_mempool_check: list[bytes32] = []
+    ) -> list[TransactionRecord]:
         # Gather all pending transactions
-        pending_txs: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_all_unconfirmed()
+        pending_txs: list[TransactionRecord] = await self.wallet_state_manager.tx_store.get_all_unconfirmed()
         # Filter clawback txs
         pending_txs = [
             tx
@@ -269,11 +269,11 @@ class WalletTestFramework:
     full_node: FullNodeSimulator
     full_node_rpc_client: FullNodeRpcClient
     trusted_full_node: bool
-    environments: List[WalletEnvironment]
+    environments: list[WalletEnvironment]
     tx_config: TXConfig = DEFAULT_TX_CONFIG

     async def process_pending_states(
-        self, state_transitions: List[WalletStateTransition], invalid_transactions: List[bytes32] = []
+        self, state_transitions: list[WalletStateTransition], invalid_transactions: list[bytes32] = []
     ) -> None:
         """
         This is the main entry point for processing state in wallet tests.
         It does the following things:
@@ -288,16 +288,16 @@
         """
         # Take note of the number of puzzle hashes if we're supposed to be reusing
         if self.tx_config.reuse_puzhash:
-            puzzle_hash_indexes: List[Dict[uint32, Optional[DerivationRecord]]] = []
+            puzzle_hash_indexes: list[dict[uint32, Optional[DerivationRecord]]] = []
             for env in self.environments:
-                ph_indexes: Dict[uint32, Optional[DerivationRecord]] = {}
+                ph_indexes: dict[uint32, Optional[DerivationRecord]] = {}
                 for wallet_id in env.wallet_state_manager.wallets:
                     ph_indexes[wallet_id] = (
                         await env.wallet_state_manager.puzzle_store.get_current_derivation_record_for_wallet(wallet_id)
                     )
                 puzzle_hash_indexes.append(ph_indexes)

-        pending_txs: List[List[TransactionRecord]] = []
+        pending_txs: list[list[TransactionRecord]] = []

         # Check balances prior to block
         try:
@@ -354,7 +354,7 @@
             try:
                 await self.full_node.check_transactions_confirmed(env.wallet_state_manager, txs)
             except TimeoutError:  # pragma: no cover
-                unconfirmed: List[TransactionRecord] = await env.wallet_state_manager.tx_store.get_all_unconfirmed()
+                unconfirmed: list[TransactionRecord] = await env.wallet_state_manager.tx_store.get_all_unconfirmed()
                 raise TimeoutError(
                     f"ENV-{i} TXs not confirmed: {[tx.to_json_dict() for tx in unconfirmed if tx in txs]}"
                 )
diff --git a/chia/_tests/farmer_harvester/test_farmer.py b/chia/_tests/farmer_harvester/test_farmer.py
index 2433919df7e8..0cb53e22fa94 100644
--- a/chia/_tests/farmer_harvester/test_farmer.py
+++ b/chia/_tests/farmer_harvester/test_farmer.py
@@ -5,7 +5,7 @@
 from dataclasses import dataclass
 from time import time
 from types import TracebackType
-from typing import Any, Dict, List, Optional, Tuple, Type, Union, cast
+from typing import Any, Optional, Union, cast
 from unittest.mock import ANY

 import pytest
@@ -39,23 +39,23 @@ class StripOldEntriesCase:
-    pairs: List[Tuple[float, int]]
+    pairs: list[tuple[float, int]]
     before: float
-    expected_result: List[Tuple[float, int]]
+    expected_result: list[tuple[float, int]]

-    def __init__(self, pairs: List[Tuple[float, int]], before: float, expected_result: List[Tuple[float, int]]):
+    def __init__(self, pairs: list[tuple[float, int]], before: float, expected_result: list[tuple[float, int]]):
         self.pairs = pairs
         self.before = before
         self.expected_result = expected_result


 class IncrementPoolStatsCase:
-    pool_states: Dict[bytes32, Any]
+    pool_states: dict[bytes32, Any]
     p2_singleton_puzzle_hash: bytes32
     name: str
     current_time: float
     count: int
-    value: Optional[Union[int, Dict[str, Any]]]
+    value: Optional[Union[int, dict[str, Any]]]
     expected_result: Any

     def __init__(
@@ -64,7 +64,7 @@ def __init__(
         name: str,
         current_time: float,
         count: int,
-        value: Optional[Union[int, Dict[str, Any]]],
+        value: Optional[Union[int, dict[str, Any]]],
         expected_result: Any,
     ):
         prepared_p2_singleton_puzzle_hash = std_hash(b"11223344")
@@ -142,10 +142,10 @@ class NewProofOfSpaceCase:
     pool_config: PoolWalletConfig
     pool_difficulty: Optional[uint64]
     authentication_token_timeout: Optional[uint8]
-    farmer_private_keys: List[PrivateKey]
-    authentication_keys: Dict[bytes32, PrivateKey]
+    farmer_private_keys: list[PrivateKey]
+    authentication_keys: dict[bytes32, PrivateKey]
     use_invalid_peer_response: bool
-    expected_pool_state: Dict[str, Any]
+    expected_pool_state: dict[str, Any]
     marks: Marks = ()

     # This creates a test case whose proof of space passes plot filter and quality check
@@ -159,7 +159,7 @@ def create_verified_quality_case(
         authentication_token_timeout: Optional[uint8],
         use_invalid_peer_response: bool,
         has_valid_authentication_keys: bool,
-        expected_pool_stats: Dict[str, Any],
+        expected_pool_stats: dict[str, Any],
     ) -> NewProofOfSpaceCase:
         p2_singleton_puzzle_hash = bytes32.fromhex("302e05a1e6af431c22043ae2a9a8f71148c955c372697cb8ab348160976283df")
         pool_config = PoolWalletConfig(
@@ -672,7 +672,7 @@ class DummyPoolResponse:
     new_difficulty: Optional[int] = None

     async def text(self) -> str:
-        json_dict: Dict[str, Any] = dict()
+        json_dict: dict[str, Any] = dict()
         if self.error_code:
             json_dict["error_code"] = self.error_code
             json_dict["error_message"] = self.error_message if self.error_message else "error-msg"
@@ -686,14 +686,14 @@ async def __aenter__(self) -> DummyPoolResponse:

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         pass


-def create_valid_pos(farmer: Farmer) -> Tuple[farmer_protocol.NewSignagePoint, ProofOfSpace, NewProofOfSpace]:
+def create_valid_pos(farmer: Farmer) -> tuple[farmer_protocol.NewSignagePoint, ProofOfSpace, NewProofOfSpace]:
     case = NewProofOfSpaceCase.create_verified_quality_case(
         difficulty=uint64(1),
         sub_slot_iters=uint64(1000000000000),
@@ -764,7 +764,7 @@ def create_valid_pos(farmer: Farmer) -> Tuple[farmer_protocol.NewSignagePoint, P
     return sp, pos, new_pos


-def override_pool_state(overrides: Dict[str, Any]) -> Dict[str, Any]:
+def override_pool_state(overrides: dict[str, Any]) -> dict[str, Any]:
     pool_state = {
         "points_found_since_start": 0,
         # Original item format here is (timestamp, value) but we'll ignore timestamp part
@@ -793,7 +793,7 @@ def override_pool_state(overrides: Dict[str, Any]) -> Dict[str, Any]:
 class PoolStateCase:
     id: str
     pool_response: DummyPoolResponse
-    expected_pool_state: Dict[str, Any]
+    expected_pool_state: dict[str, Any]
     marks: Marks = ()


@@ -867,7 +867,7 @@ class PoolStateCase:
 @pytest.mark.anyio
 async def test_farmer_pool_response(
     mocker: MockerFixture,
-    farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools],
     case: PoolStateCase,
 ) -> None:
     _, farmer_service, _ = farmer_one_harvester
@@ -927,7 +927,7 @@ def assert_pool_errors_24h() -> None:
     assert_stats_24h("missing_partials_24h")


-def make_pool_list_entry(overrides: Dict[str, Any]) -> Dict[str, Any]:
+def make_pool_list_entry(overrides: dict[str, Any]) -> dict[str, Any]:
     pool_list_entry = {
         "owner_public_key": "84c3fcf9d5581c1ddc702cb0f3b4a06043303b334dd993ab42b2c320ebfa98e5ce558448615b3f69638ba92cf7f43da5",  # noqa: E501
         "p2_singleton_puzzle_hash": "302e05a1e6af431c22043ae2a9a8f71148c955c372697cb8ab348160976283df",
@@ -941,7 +941,7 @@ def make_pool_list_entry(overrides: Dict[str, Any]) -> Dict[str, Any]:
     return pool_list_entry


-def make_pool_info() -> Dict[str, Any]:
+def make_pool_info() -> dict[str, Any]:
     return {
         "name": "Pool Name",
         "description": "Pool Description",
@@ -955,7 +955,7 @@ def make_pool_info() -> Dict[str, Any]:
     }


-def make_pool_state(p2_singleton_puzzle_hash: bytes32, overrides: Dict[str, Any]) -> Dict[str, Any]:
+def make_pool_state(p2_singleton_puzzle_hash: bytes32, overrides: dict[str, Any]) -> dict[str, Any]:
     pool_info = {
         "p2_singleton_puzzle_hash": p2_singleton_puzzle_hash.hex(),
         "points_found_since_start": 0,
@@ -995,8 +995,8 @@ class DummyPoolInfoResponse:
     ok: bool
     status: int
     url: URL
-    pool_info: Optional[Dict[str, Any]] = None
-    history: Tuple[DummyClientResponse, ...] = ()
+    pool_info: Optional[dict[str, Any]] = None
+    history: tuple[DummyClientResponse, ...] = ()

     async def text(self) -> str:
         if self.pool_info is None:
@@ -1009,7 +1009,7 @@ async def __aenter__(self) -> DummyPoolInfoResponse:

     async def __aexit__(
         self,
-        exc_type: Optional[Type[BaseException]],
+        exc_type: Optional[type[BaseException]],
         exc_val: Optional[BaseException],
         exc_tb: Optional[TracebackType],
     ) -> None:
         pass
@@ -1171,7 +1171,7 @@ def id(self) -> str:
 @pytest.mark.anyio
 async def test_farmer_pool_info_config_update(
     mocker: MockerFixture,
-    farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools],
     case: PoolInfoCase,
 ) -> None:
     _, farmer_service, _ = farmer_one_harvester
@@ -1212,7 +1212,7 @@ async def test_farmer_pool_info_config_update(
 class PartialSubmitHeaderCase(DataCase):
     _id: str
     harvester_peer: DummyHarvesterPeer
-    expected_headers: Dict[str, str]
+    expected_headers: dict[str, str]
     marks: Marks = ()

     @property
@@ -1236,7 +1236,7 @@ def id(self) -> str:
 @pytest.mark.anyio
 async def test_farmer_additional_headers_on_partial_submit(
     mocker: MockerFixture,
-    farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools],
     case: PartialSubmitHeaderCase,
 ) -> None:
     _, farmer_service, _ = farmer_one_harvester
diff --git a/chia/_tests/farmer_harvester/test_farmer_harvester.py b/chia/_tests/farmer_harvester/test_farmer_harvester.py
index 3412a5d0006a..acd3850e93db 100644
--- a/chia/_tests/farmer_harvester/test_farmer_harvester.py
+++ b/chia/_tests/farmer_harvester/test_farmer_harvester.py
@@ -3,7 +3,7 @@
 import asyncio
 from math import floor
 from pathlib import Path
-from typing import Any, Dict, List, Optional, Tuple
+from typing import Any, Optional

 import pytest
 from chia_rs import G1Element
@@ -32,19 +32,19 @@ def farmer_is_started(farmer: Farmer) -> bool:
     return farmer.started


-async def get_harvester_config(harvester_rpc_port: Optional[int], root_path: Path) -> Dict[str, Any]:
+async def get_harvester_config(harvester_rpc_port: Optional[int], root_path: Path) -> dict[str, Any]:
     async with get_any_service_client(HarvesterRpcClient, harvester_rpc_port, root_path) as (harvester_client, _):
         return await harvester_client.get_harvester_config()


-async def update_harvester_config(harvester_rpc_port: Optional[int], root_path: Path, config: Dict[str, Any]) -> bool:
+async def update_harvester_config(harvester_rpc_port: Optional[int], root_path: Path, config: dict[str, Any]) -> bool:
     async with get_any_service_client(HarvesterRpcClient, harvester_rpc_port, root_path) as (harvester_client, _):
         return await harvester_client.update_harvester_config(config)


 @pytest.mark.anyio
 async def test_start_with_empty_keychain(
-    farmer_one_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools]
+    farmer_one_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools]
 ) -> None:
     _, farmer_service, bt = farmer_one_harvester_not_started
     farmer: Farmer = farmer_service._node
@@ -67,7 +67,7 @@ async def test_start_with_empty_keychain(

 @pytest.mark.anyio
 async def test_harvester_handshake(
-    farmer_one_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools]
+    farmer_one_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools]
 ) -> None:
     harvesters, farmer_service, bt = farmer_one_harvester_not_started
     harvester_service = harvesters[0]
@@ -169,7 +169,7 @@ def log_is_ready() -> bool:


 @pytest.mark.anyio
-async def test_harvester_config(farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools]) -> None:
+async def test_harvester_config(farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools]) -> None:
     harvester_services, farmer_service, bt = farmer_one_harvester
     harvester_service = harvester_services[0]

@@ -179,7 +179,7 @@ async def test_harvester_config(farmer_one_harvester: Tuple[List[HarvesterServic
     harvester_config = await get_harvester_config(harvester_rpc_port, bt.root_path)
     assert harvester_config["success"] is True

-    def check_config_match(config1: Dict[str, Any], config2: Dict[str, Any]) -> None:
+    def check_config_match(config1: dict[str, Any], config2: dict[str, Any]) -> None:
         assert config1["harvester"]["use_gpu_harvesting"] == config2["use_gpu_harvesting"]
         assert config1["harvester"]["gpu_index"] == config2["gpu_index"]
         assert config1["harvester"]["enforce_gpu_index"] == config2["enforce_gpu_index"]
@@ -212,13 +212,13 @@ def check_config_match(config1: Dict[str, Any], config2: Dict[str, Any]) -> None

 @pytest.mark.anyio
 async def test_missing_signage_point(
-    farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools]
+    farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools]
 ) -> None:
     _, farmer_service, bt = farmer_one_harvester
     farmer_api = farmer_service._api
     farmer = farmer_api.farmer

-    def create_sp(index: int, challenge_hash: bytes32) -> Tuple[uint64, farmer_protocol.NewSignagePoint]:
+    def create_sp(index: int, challenge_hash: bytes32) -> tuple[uint64, farmer_protocol.NewSignagePoint]:
         time = uint64(index + 1)
         sp = farmer_protocol.NewSignagePoint(
             challenge_hash, std_hash(b"2"), std_hash(b"3"), uint64(1), uint64(1000000), uint8(index), uint32(1)
         )
@@ -271,7 +271,7 @@ def create_sp(index: int, challenge_hash: bytes32) -> Tuple[uint64, farmer_proto
     assert original_state_changed_callback is not None

     number_of_missing_sps: uint32 = uint32(0)

-    def state_changed(change: str, data: Dict[str, Any]) -> None:
+    def state_changed(change: str, data: dict[str, Any]) -> None:
         nonlocal number_of_missing_sps
         number_of_missing_sps = data["missing_signage_points"][1]
         original_state_changed_callback(change, data)
@@ -284,7 +284,7 @@ def state_changed(change: str, data: Dict[str, Any]) -> None:

 @pytest.mark.anyio
 async def test_harvester_has_no_server(
-    farmer_one_harvester: Tuple[List[FarmerService], HarvesterService, BlockTools],
+    farmer_one_harvester: tuple[list[FarmerService], HarvesterService, BlockTools],
 ) -> None:
     harvesters, _, bt = farmer_one_harvester
     harvester_server = harvesters[0]._server
diff --git a/chia/_tests/farmer_harvester/test_filter_prefix_bits.py b/chia/_tests/farmer_harvester/test_filter_prefix_bits.py
index aeec03072bce..d5c1d21ef518 100644
--- a/chia/_tests/farmer_harvester/test_filter_prefix_bits.py
+++ b/chia/_tests/farmer_harvester/test_filter_prefix_bits.py
@@ -1,7 +1,8 @@
 from __future__ import annotations

+from collections.abc import AsyncIterator
 from pathlib import Path
-from typing import Any, AsyncIterator, Dict, List, Optional, Tuple
+from typing import Any, Optional

 import pytest
@@ -31,7 +32,7 @@
     argnames=["filter_prefix_bits", "should_pass"], argvalues=[(9, 34), (8, 89), (7, 162), (6, 295), (5, 579)]
 )
 def test_filter_prefix_bits_on_blocks(
-    default_10000_blocks: List[FullBlock], filter_prefix_bits: uint8, should_pass: int
+    default_10000_blocks: list[FullBlock], filter_prefix_bits: uint8, should_pass: int
 ) -> None:
     passed = 0
     for block in default_10000_blocks:
@@ -50,7 +51,7 @@ def test_filter_prefix_bits_on_blocks(
 @pytest.fixture(scope="function")
 async def farmer_harvester_with_filter_size_9(
     get_temp_keyring: Keychain, tmp_path: Path, self_hostname: str
-) -> AsyncIterator[Tuple[HarvesterService, FarmerAPI]]:
+) -> AsyncIterator[tuple[HarvesterService, FarmerAPI]]:
     async def have_connections() -> bool:
         return len(await farmer_rpc_cl.get_connections()) > 0

@@ -86,14 +87,14 @@ async def have_connections() -> bool:
 @pytest.mark.parametrize(argnames=["peak_height", "eligible_plots"], argvalues=[(5495999, 0), (5496000, 1)])
 @pytest.mark.anyio
 async def test_filter_prefix_bits_with_farmer_harvester(
-    farmer_harvester_with_filter_size_9: Tuple[HarvesterService, FarmerAPI],
+    farmer_harvester_with_filter_size_9: tuple[HarvesterService, FarmerAPI],
     peak_height: uint32,
     eligible_plots: int,
 ) -> None:
     state_change = None
     state_change_data = None

-    def state_changed_callback(change: str, change_data: Optional[Dict[str, Any]]) -> None:
+    def state_changed_callback(change: str, change_data: Optional[dict[str, Any]]) -> None:
         nonlocal state_change, state_change_data
         state_change = change
         state_change_data = change_data
diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py
index f1de2794668f..3e18c6f21433 100644
--- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py
+++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py
@@ -6,7 +6,7 @@
 import json
 import logging
 from os.path import dirname
-from typing import Any, List, Optional, Tuple, Union, cast
+from typing import Any, Optional, Union, cast

 import pytest
 from chia_rs import G1Element
@@ -45,12 +45,12 @@
 from chia.util.ints import uint8, uint32, uint64

 SPType = Union[timelord_protocol.NewEndOfSubSlotVDF, timelord_protocol.NewSignagePointVDF]
-SPList = List[SPType]
+SPList = list[SPType]


 @pytest.mark.anyio
 async def test_harvester_receive_source_signing_data(
-    farmer_harvester_2_simulators_zero_bits_plot_filter: Tuple[
+    farmer_harvester_2_simulators_zero_bits_plot_filter: tuple[
         FarmerService,
         HarvesterService,
         Union[FullNodeService, SimulatorFullNodeService],
@@ -91,7 +91,7 @@ async def test_harvester_receive_source_signing_data(
     await wait_until_node_type_connected(full_node_1.server, NodeType.FULL_NODE)

     # Prepare test data
-    blocks: List[FullBlock]
+    blocks: list[FullBlock]
     signage_points: SPList
     (blocks, signage_points) = load_test_data()
@@ -256,7 +256,7 @@ def did_finished_validating_data() -> bool:

 @pytest.mark.anyio
 async def test_harvester_fee_convention(
-    farmer_harvester_2_simulators_zero_bits_plot_filter: Tuple[
+    farmer_harvester_2_simulators_zero_bits_plot_filter: tuple[
         FarmerService,
         HarvesterService,
         Union[FullNodeService, SimulatorFullNodeService],
@@ -286,7 +286,7 @@ async def test_harvester_fee_convention(

 @pytest.mark.anyio
 async def test_harvester_fee_invalid_convention(
-    farmer_harvester_2_simulators_zero_bits_plot_filter: Tuple[
+    farmer_harvester_2_simulators_zero_bits_plot_filter: tuple[
         FarmerService,
         HarvesterService,
         Union[FullNodeService, SimulatorFullNodeService],
@@ -319,7 +319,7 @@ async def test_harvester_fee_invalid_convention(

 def prepare_sp_and_pos_for_fee_test(
     fee_threshold_offset: int,
-) -> Tuple[farmer_protocol.NewSignagePoint, harvester_protocol.NewProofOfSpace]:
+) -> tuple[farmer_protocol.NewSignagePoint, harvester_protocol.NewProofOfSpace]:
     proof = std_hash(b"1")
     challenge = std_hash(b"1")
@@ -420,7 +420,7 @@ def decode_sp(
     return timelord_protocol.NewSignagePointVDF.from_bytes(sp_bytes)


-async def add_test_blocks_into_full_node(blocks: List[FullBlock], full_node: FullNode) -> None:
+async def add_test_blocks_into_full_node(blocks: list[FullBlock], full_node: FullNode) -> None:
     # Inject full node with a pre-existing block to skip initial genesis sub-slot
     # so that we have blocks generated that have our farmer reward address, instead
     # of the GENESIS_PRE_FARM_FARMER_PUZZLE_HASH.
@@ -436,7 +436,7 @@ async def add_test_blocks_into_full_node(blocks: List[FullBlock], full_node: Ful
             prev_ses_block = curr
         new_slot = len(block.finished_sub_slots) > 0
         ssi, diff = get_next_sub_slot_iters_and_difficulty(full_node.constants, new_slot, prev_b, full_node.blockchain)
-        pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing(
+        pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing(
             full_node.blockchain.constants,
             full_node.blockchain,
             blocks,
@@ -490,7 +490,7 @@ async def inject_signage_points(signage_points: SPList, full_node_1: FullNode, f
 # A FullBlock is also included which is infused already in the chain so
 # that the next NewEndOfSubSlotVDF is valid.
 # This block has to be added to the test FullNode before injecting the signage points.
-def load_test_data() -> Tuple[List[FullBlock], SPList]:
+def load_test_data() -> tuple[list[FullBlock], SPList]:
     file_path: str = dirname(__file__) + "/test_third_party_harvesters_data.json"
     with open(file_path) as f:
         data = json.load(f)
diff --git a/chia/_tests/fee_estimation/test_fee_estimation_integration.py b/chia/_tests/fee_estimation/test_fee_estimation_integration.py
index 80129ecf75e4..6dccfec3a2ef 100644
--- a/chia/_tests/fee_estimation/test_fee_estimation_integration.py
+++ b/chia/_tests/fee_estimation/test_fee_estimation_integration.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import types
-from typing import Dict, List

 import pytest
 from chia_rs import Coin
@@ -41,7 +40,7 @@ def make_mempoolitem() -> MempoolItem:
     block_height = 1

     fee = uint64(10000000)
-    spends: List[SpendConditions] = []
+    spends: list[SpendConditions] = []
     conds = SpendBundleConditions(spends, 0, 0, 0, None, None, [], cost, 0, 0, False)
     mempool_item = MempoolItem(
         spend_bundle,
@@ -125,7 +124,7 @@ def test_item_not_removed_if_not_added() -> None:

 def test_mempool_fee_estimator_remove_item() -> None:
-    should_call_fee_estimator_remove: Dict[MempoolRemoveReason, int] = {
+    should_call_fee_estimator_remove: dict[MempoolRemoveReason, int] = {
         MempoolRemoveReason.BLOCK_INCLUSION: 0,
         MempoolRemoveReason.CONFLICT: 1,
         MempoolRemoveReason.POOL_FULL: 1,
@@ -188,7 +187,7 @@ def test_current_block_height_new_block() -> None:
     fee_estimator = FeeEstimatorInterfaceIntegrationVerificationObject()
     mempool = Mempool(test_mempool_info, fee_estimator)
     height = uint32(10)
-    included_items: List[MempoolItemInfo] = []
+    included_items: list[MempoolItemInfo] = []
     mempool.fee_estimator.new_block(FeeBlockInfo(height, included_items))
     assert mempool.fee_estimator.current_block_height == height  # type: ignore[attr-defined]

@@ -197,7 +196,7 @@ def test_current_block_height_new_height_then_new_block() -> None:
     fee_estimator = FeeEstimatorInterfaceIntegrationVerificationObject()
     mempool = Mempool(test_mempool_info, fee_estimator)
     height = uint32(11)
-    included_items: List[MempoolItemInfo] = []
+    included_items: list[MempoolItemInfo] = []
     fee_estimator.new_block_height(uint32(height - 1))
     mempool.fee_estimator.new_block(FeeBlockInfo(height, included_items))
     assert mempool.fee_estimator.current_block_height == height  # type: ignore[attr-defined]
@@ -207,7 +206,7 @@ def test_current_block_height_new_block_then_new_height() -> None:
     fee_estimator = FeeEstimatorInterfaceIntegrationVerificationObject()
     mempool = Mempool(test_mempool_info, fee_estimator)
     height = uint32(12)
-    included_items: List[MempoolItemInfo] = []
+    included_items: list[MempoolItemInfo] = []
     fee_estimator.new_block_height(uint32(height - 1))
     mempool.fee_estimator.new_block(FeeBlockInfo(height, included_items))
     fee_estimator.new_block_height(uint32(height + 1))
diff --git a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py
index de95c8f2b986..fea36c8be9f6 100644
--- a/chia/_tests/fee_estimation/test_fee_estimation_rpc.py
+++ b/chia/_tests/fee_estimation/test_fee_estimation_rpc.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import re
-from typing import Any, List, Tuple
+from typing import Any

 import pytest
@@ -21,8 +21,8 @@

 @pytest.fixture(scope="function")
 async def setup_node_and_rpc(
-    two_wallet_nodes_services: Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools],
-) -> Tuple[FullNodeRpcClient, FullNodeRpcApi]:
+    two_wallet_nodes_services: tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools],
+) -> tuple[FullNodeRpcClient, FullNodeRpcApi]:
     full_nodes, wallets, bt = two_wallet_nodes_services
     wallet = wallets[0]._node.wallet_state_manager.main_wallet
     full_node_apis = [full_node_service._api for full_node_service in full_nodes]
@@ -47,8 +47,8 @@ async def setup_node_and_rpc(

 @pytest.fixture(scope="function")
 async def one_node_no_blocks(
-    one_node: Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools]
-) -> Tuple[FullNodeRpcClient, FullNodeRpcApi]:
+    one_node: tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools]
+) -> tuple[FullNodeRpcClient, FullNodeRpcApi]:
     full_nodes, wallets, bt = one_node
     full_node_apis = [full_node_service._api for full_node_service in full_nodes]
     full_node_api: FullNodeSimulator = full_node_apis[0]
@@ -66,7 +66,7 @@ async def one_node_no_blocks(

 @pytest.mark.anyio
-async def test_get_blockchain_state(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_get_blockchain_state(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     # Confirm full node setup correctly
     client, _ = setup_node_and_rpc
     response = await client.get_blockchain_state()
@@ -74,7 +74,7 @@ async def test_get_blockchain_state(setup_node_and_rpc: Tuple[FullNodeRpcClient,

 @pytest.mark.anyio
-async def test_empty_request(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_empty_request(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc

     with pytest.raises(ValueError):
@@ -82,7 +82,7 @@ async def test_empty_request(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNo

 @pytest.mark.anyio
-async def test_empty_peak(one_node_no_blocks: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_empty_peak(one_node_no_blocks: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = one_node_no_blocks
     response = await full_node_rpc_api.get_fee_estimate({"target_times": [], "cost": 1})
     del response["node_time_utc"]
@@ -105,56 +105,56 @@ async def test_empty_peak(one_node_no_blocks: Tuple[FullNodeRpcClient, FullNodeR

 @pytest.mark.anyio
-async def test_no_target_times(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_no_target_times(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"cost": 1})


 @pytest.mark.anyio
-async def test_negative_time(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_negative_time(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"cost": 1, "target_times": [-1]})


 @pytest.mark.anyio
-async def test_negative_cost(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_negative_cost(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"cost": -1, "target_times": [1]})


 @pytest.mark.anyio
-async def test_no_cost_or_tx(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_no_cost_or_tx(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"target_times": []})


 @pytest.mark.anyio
-async def test_both_cost_and_tx(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_both_cost_and_tx(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"target_times": [], "cost": 1, "spend_bundle": "80"})


 @pytest.mark.anyio
-async def test_target_times_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_target_times_invalid_type(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(TypeError):
         await full_node_rpc_api.get_fee_estimate({"target_times": 1, "cost": 1})


 @pytest.mark.anyio
-async def test_cost_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_cost_invalid_type(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
         await full_node_rpc_api.get_fee_estimate({"target_times": [], "cost": "a lot"})


 @pytest.mark.anyio
-async def test_tx_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_tx_invalid_type(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(TypeError):
         await full_node_rpc_api.get_fee_estimate({"target_times": [], "spend_bundle": {"coin_spends": 1}})
@@ -164,7 +164,7 @@ async def test_tx_invalid_type(setup_node_and_rpc: Tuple[FullNodeRpcClient, Full

 @pytest.mark.anyio
-async def test_empty_target_times(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_empty_target_times(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     response = await full_node_rpc_api.get_fee_estimate({"target_times": [], "cost": 1})
     assert response["estimates"] == []
@@ -172,7 +172,7 @@ async def test_empty_target_times(setup_node_and_rpc: Tuple[FullNodeRpcClient, F

 @pytest.mark.anyio
-async def test_cost(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_cost(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     response = await full_node_rpc_api.get_fee_estimate({"target_times": [1], "cost": 1})
     assert response["estimates"] == [0]
@@ -180,7 +180,7 @@ async def test_cost(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]

 @pytest.mark.anyio
-async def test_tx(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools) -> None:
+async def test_tx(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     wallet_a: WalletTool = bt.get_pool_wallet_tool()
     my_puzzle_hash = wallet_a.get_new_puzzlehash()
@@ -197,7 +197,7 @@ async def test_tx(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi],

 @pytest.mark.anyio
-async def test_multiple(setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
+async def test_multiple(setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi]) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     response = await full_node_rpc_api.get_fee_estimate({"target_times": [1, 5, 10, 15, 60, 120, 180, 240], "cost": 1})
     assert response["estimates"] == [0, 0, 0, 0, 0, 0, 0, 0]
@@ -214,11 +214,11 @@ def get_test_spendbundle(bt: BlockTools) -> SpendBundle:

 @pytest.mark.anyio
 async def test_validate_fee_estimate_cost_err(
-    setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
+    setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
 ) -> None:
     spend_bundle = get_test_spendbundle(bt)
     client, full_node_rpc_api = setup_node_and_rpc
-    bad_arglist: List[List[Any]] = [
+    bad_arglist: list[list[Any]] = [
         [["foo", "bar"]],
         [["spend_bundle", spend_bundle.to_json_dict()], ["cost", 1]],
         [["spend_bundle", spend_bundle.to_json_dict()], ["spend_type", "send_xch_transaction"]],
@@ -239,12 +239,12 @@ async def test_validate_fee_estimate_cost_err(

 @pytest.mark.anyio
 async def test_validate_fee_estimate_cost_ok(
-    setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
+    setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
 ) -> None:
     spend_bundle = get_test_spendbundle(bt)
     client, full_node_rpc_api = setup_node_and_rpc

-    good_arglist: List[List[Any]] = [
+    good_arglist: list[list[Any]] = [
         ["spend_bundle", spend_bundle.to_json_dict()],
         ["cost", 1],
         ["spend_type", "send_xch_transaction"],
@@ -257,7 +257,7 @@ async def test_validate_fee_estimate_cost_ok(

 @pytest.mark.anyio
 async def test_get_spendbundle_type_cost_missing(
-    setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
+    setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
 ) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(KeyError, match=re.escape("INVALID")):
@@ -267,7 +267,7 @@ async def test_get_spendbundle_type_cost_missing(

 @pytest.mark.anyio
 async def test_get_spendbundle_type_cost_spend_count_ok(
-    setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
+    setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
 ) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     spend_counts = [0, 1, 2]
@@ -279,7 +279,7 @@ async def test_get_spendbundle_type_cost_spend_count_ok(

 @pytest.mark.anyio
 async def test_get_spendbundle_type_cost_spend_count_bad(
-    setup_node_and_rpc: Tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
+    setup_node_and_rpc: tuple[FullNodeRpcClient, FullNodeRpcApi], bt: BlockTools
 ) -> None:
     client, full_node_rpc_api = setup_node_and_rpc
     with pytest.raises(ValueError):
diff --git a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py
index e94b08f3a4c4..9eb494098a27 100644
--- a/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py
+++ b/chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import logging
-from typing import List

 import pytest
@@ -47,7 +46,7 @@ def test_single_estimate() -> None:

 def make_block(
     height: uint32, num_tx: int, cost: uint64, fee: uint64, num_blocks_wait_in_mempool: int
-) -> List[MempoolItemInfo]:
+) -> list[MempoolItemInfo]:
     block_included = uint32(height - num_blocks_wait_in_mempool)
     return [MempoolItemInfo(cost, fee, block_included)] * num_tx
@@ -95,7 +94,7 @@ def test_init_buckets() -> None:

 def test_get_bucket_index_empty_buckets() -> None:
-    buckets: List[float] = []
+    buckets: list[float] = []
     for rate in [0.5, 1.0, 2.0]:
         with pytest.raises(RuntimeError):
             a = get_bucket_index(buckets, rate)
@@ -134,12 +133,12 @@ def test_get_bucket_index() -> None:

 def test_monotonically_decrease() -> None:
-    inputs: List[List[float]]
-    output: List[List[float]]
+    inputs: list[list[float]]
+    output: list[list[float]]
     inputs = [[], [-1], [0], [1], [0, 0], [0, 1], [1, 0], [1, 2, 3], [1, 1, 1], [3, 2, 1], [3, 3, 1], [1, 3, 3]]
     output = [[], [-1], [0], [1], [0, 0], [0, 0], [1, 0], [1, 1, 1], [1, 1, 1], [3, 2, 1], [3, 3, 1], [1, 1, 1]]
-    i: List[float]
-    o: List[float]
+    i: list[float]
+    o: list[float]
     for i, o in zip(inputs, output):
         print(o, i)
         assert o == make_monotonically_decreasing(i)
diff --git a/chia/_tests/fee_estimation/test_mempoolitem_height_added.py b/chia/_tests/fee_estimation/test_mempoolitem_height_added.py
index 046ffba6524a..c883d65e5c39 100644
--- a/chia/_tests/fee_estimation/test_mempoolitem_height_added.py
+++ b/chia/_tests/fee_estimation/test_mempoolitem_height_added.py
@@ -1,7 +1,7 @@
 from __future__ import annotations

 import logging
-from typing import Callable, List, Optional, Tuple
+from typing import Callable, Optional

 import pytest
 from chia_rs import Coin, G2Element
@@ -32,7 +32,7 @@ async def farm(
     sim: SpendSim,
     puzzle_hash: bytes32,
     item_inclusion_filter: Optional[Callable[[bytes32], bool]] = None,
-) -> Tuple[List[Coin], List[Coin], List[Coin]]:
+) -> tuple[list[Coin], list[Coin], list[Coin]]:
     additions, removals = await sim.farm_block(puzzle_hash)  # , item_inclusion_filter)
     height = sim.get_height()
     new_reward_coins = sim.block_records[height].reward_claims_incorporated
@@ -51,7 +51,7 @@ def make_tx_sb(from_coin: Coin) -> SpendBundle:

 async def init_test(
     sim: SpendSim, cli: SimClient, puzzle_hash: bytes32, spends_per_block: int
-) -> Tuple[BitcoinFeeEstimator, List[Coin], List[Coin]]:
+) -> tuple[BitcoinFeeEstimator, list[Coin], list[Coin]]:
     new_reward_coins = []
     spend_coins = []
     fee_coins = []
diff --git a/chia/_tests/generator/test_compression.py b/chia/_tests/generator/test_compression.py
index 216e965ff26e..e313c942f9be 100644
--- a/chia/_tests/generator/test_compression.py
+++ b/chia/_tests/generator/test_compression.py
@@ -3,7 +3,7 @@
 import io
 from dataclasses import dataclass
-from typing import Any, List
+from typing import Any

 import pytest
 from chia_rs import ALLOW_BACKREFS
@@ -58,7 +58,7 @@
 assert serialized_length(gen2) == len(gen2)


-def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> List[Any]:
+def spend_bundle_to_coin_spend_entry_list(bundle: SpendBundle) -> list[Any]:
     r = []
     for coin_spend in bundle.coin_spends:
         entry = [
diff --git a/chia/_tests/generator/test_rom.py b/chia/_tests/generator/test_rom.py
index 410393050f8b..8155c54f6377 100644
--- a/chia/_tests/generator/test_rom.py
+++ b/chia/_tests/generator/test_rom.py
@@ -1,7 +1,5 @@
 from __future__ import annotations

-from typing import List, Tuple
-
 from clvm.CLVMObject import CLVMStorage
 from clvm_tools import binutils
 from clvm_tools.clvmc import compile_clvm_text
@@ -78,13 +76,13 @@ def block_generator() -> BlockGenerator:
     )


-def run_generator(self: BlockGenerator) -> Tuple[int, Program]:
+def run_generator(self: BlockGenerator) -> tuple[int, Program]:
     """This mode is meant for accepting possibly soft-forked transactions into the mempool"""
     args = Program.to([self.generator_refs])
     return GENERATOR_MOD.run_with_cost(MAX_COST, [self.program, args])


-def as_atom_list(prg: CLVMStorage) -> List[bytes]:
+def as_atom_list(prg: CLVMStorage) -> list[bytes]:
     """
     Pretend `prg` is a list of atoms. Return the corresponding
     python list of atoms.
diff --git a/chia/_tests/plot_sync/test_delta.py b/chia/_tests/plot_sync/test_delta.py
index f3c980411df8..5b0246a8b51f 100644
--- a/chia/_tests/plot_sync/test_delta.py
+++ b/chia/_tests/plot_sync/test_delta.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import logging
-from typing import List

 import pytest
 from chia_rs import G1Element
@@ -79,13 +78,13 @@ def test_list_delta(delta: DeltaType) -> None:
         [["-2", "2", "3", "-1"], ["2", "3"], PathListDelta([], ["-2", "-1"])],
     ],
 )
-def test_path_list_delta_from_lists(old: List[str], new: List[str], result: PathListDelta) -> None:
+def test_path_list_delta_from_lists(old: list[str], new: list[str], result: PathListDelta) -> None:
     assert PathListDelta.from_lists(old, new) == result


 def test_delta_empty() -> None:
     delta: Delta = Delta()
-    all_deltas: List[DeltaType] = [delta.valid, delta.invalid, delta.keys_missing, delta.duplicates]
+    all_deltas: list[DeltaType] = [delta.valid, delta.invalid, delta.keys_missing, delta.duplicates]
     assert delta.empty()
     for d1 in all_deltas:
         delta.valid.additions["0"] = dummy_plot("0")
diff --git a/chia/_tests/plot_sync/test_plot_sync.py b/chia/_tests/plot_sync/test_plot_sync.py
index 22f7866dc9d7..eec0f086553d 100644
--- a/chia/_tests/plot_sync/test_plot_sync.py
+++ b/chia/_tests/plot_sync/test_plot_sync.py
@@ -3,10 +3,11 @@
 import asyncio
 import contextlib
 import functools
+from collections.abc import AsyncIterator
 from dataclasses import dataclass, field, replace
 from pathlib import Path
 from shutil import copy
-from typing import Any, AsyncIterator, Callable, List, Optional, Tuple
+from typing import Any, Callable, Optional

 import pytest
 from chia_rs import G1Element
@@ -43,7 +44,7 @@ def synced(sender: Sender, receiver: Receiver, previous_last_sync_id: int) -> bo
     )


-def assert_path_list_matches(expected_list: List[str], actual_list: List[str]) -> None:
+def assert_path_list_matches(expected_list: list[str], actual_list: list[str]) -> None:
     assert len(expected_list) == len(actual_list)
     for item in expected_list:
         assert str(item) in actual_list
@@ -61,7 +62,7 @@ class ExpectedResult:
     duplicates_delta: PathListDelta = field(default_factory=PathListDelta)
     callback_passed: bool = False

-    def add_valid(self, list_plots: List[MockPlotInfo]) -> None:
+    def add_valid(self, list_plots: list[MockPlotInfo]) -> None:
         def create_mock_plot(info: MockPlotInfo) -> Plot:
             return Plot(
                 info.prover.get_filename(),
@@ -78,31 +79,31 @@ def create_mock_plot(info: MockPlotInfo) -> Plot:
         self.valid_count += len(list_plots)
         self.valid_delta.additions.update({x.prover.get_filename(): create_mock_plot(x) for x in list_plots})

-    def remove_valid(self, list_paths: List[Path]) -> None:
+    def remove_valid(self, list_paths: list[Path]) -> None:
         self.valid_count -= len(list_paths)
         self.valid_delta.removals += [str(x) for x in list_paths]

-    def add_invalid(self, list_paths: List[Path]) -> None:
+    def add_invalid(self, list_paths: list[Path]) -> None:
         self.invalid_count += len(list_paths)
         self.invalid_delta.additions += [str(x) for x in list_paths]

-    def remove_invalid(self, list_paths: List[Path]) -> None:
+    def remove_invalid(self, list_paths: list[Path]) -> None:
         self.invalid_count -= len(list_paths)
         self.invalid_delta.removals += [str(x) for x in list_paths]

-    def add_keys_missing(self, list_paths: List[Path]) -> None:
+    def add_keys_missing(self, list_paths: list[Path]) -> None:
         self.keys_missing_count += len(list_paths)
         self.keys_missing_delta.additions += [str(x) for x in list_paths]

-    def remove_keys_missing(self, list_paths: List[Path]) -> None:
+    def remove_keys_missing(self, list_paths: list[Path]) -> None:
         self.keys_missing_count -= len(list_paths)
         self.keys_missing_delta.removals += [str(x) for x in list_paths]

-    def add_duplicates(self, list_paths: List[Path]) -> None:
+    def add_duplicates(self, list_paths: list[Path]) -> None:
         self.duplicates_count += len(list_paths)
         self.duplicates_delta.additions += [str(x) for x in list_paths]

-    def remove_duplicates(self, list_paths: List[Path]) -> None:
+    def remove_duplicates(self, list_paths: list[Path]) -> None:
         self.duplicates_count -= len(list_paths)
         self.duplicates_delta.removals += [str(x) for x in list_paths]

@@ -110,9 +111,9 @@ def remove_duplicates(self, list_paths: List[Path]) -> None:
 @dataclass
 class Environment:
     root_path: Path
-    harvester_services: List[HarvesterService]
+    harvester_services: list[HarvesterService]
     farmer_service: FarmerService
-    harvesters: List[Harvester]
+    harvesters: list[Harvester]
     farmer: Farmer
     dir_1: Directory
     dir_2: Directory
@@ -121,9 +122,9 @@ class Environment:
     dir_invalid: Directory
     dir_keys_missing: Directory
     dir_duplicates: Directory
-    expected: List[ExpectedResult]
+    expected: list[ExpectedResult]
     split_farmer_service_manager: SplitAsyncManager[FarmerService]
-    split_harvester_managers: List[SplitAsyncManager[Harvester]]
+    split_harvester_managers: list[SplitAsyncManager[Harvester]]

     def get_harvester(self, peer_id: bytes32) -> Optional[Harvester]:
         for harvester in self.harvesters:
@@ -218,7 +219,7 @@ async def plot_sync_callback(self, peer_id: bytes32, delta: Optional[Delta]) ->
     async def run_sync_test(self) -> None:
         plot_manager: PlotManager
         assert len(self.harvesters) == len(self.expected)
-        last_sync_ids: List[uint64] = []
+        last_sync_ids: list[uint64] = []
         # Run the test in two steps, first trigger the refresh on both harvesters
         for harvester in self.harvesters:
             plot_manager = harvester.plot_manager
@@ -279,16 +280,16 @@ async def handshake_done(self, index: int) -> bool:

 @pytest.fixture(scope="function")
 async def environment(
     tmp_path: Path,
-    farmer_two_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_two_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools],
 ) -> AsyncIterator[Environment]:
-    def new_test_dir(name: str, plot_list: List[Path]) -> Directory:
+    def new_test_dir(name: str, plot_list: list[Path]) -> Directory:
         return Directory(tmp_path / "plots" / name, plot_list)

-    plots: List[Path] = get_test_plots()
-    plots_invalid: List[Path] = get_test_plots()[0:3]
-    plots_keys_missing: List[Path] = get_test_plots("not_in_keychain")
+    plots: list[Path] = get_test_plots()
+    plots_invalid: list[Path] = get_test_plots()[0:3]
+    plots_keys_missing: list[Path] = get_test_plots("not_in_keychain")
     # Create 4 directories where: dir_n contains n plots
-    directories: List[Directory] = []
+    directories: list[Directory] = []
     offset: int = 0
     while len(directories) < 4:
         dir_number = len(directories) + 1
@@ -535,7 +536,7 @@ async def test_farmer_restart(environment: Environment) -> None:
     env: Environment = environment
     # Load all directories for both harvesters
     await add_and_validate_all_directories(env)
-    last_sync_ids: List[uint64] = []
+    last_sync_ids: list[uint64] = []
     for i in range(0, len(env.harvesters)):
         last_sync_ids.append(env.harvesters[i].plot_sync_sender._last_sync_id)
     # Stop the farmer and make sure both receivers get dropped and refreshing gets stopped on the harvesters
@@ -569,7 +570,7 @@ async def test_farmer_restart(environment: Environment) -> None:

 @pytest.mark.anyio
 async def test_sync_start_and_disconnect_while_sync_is_active(
-    farmer_one_harvester: Tuple[List[HarvesterService], FarmerService, BlockTools]
+    farmer_one_harvester: tuple[list[HarvesterService], FarmerService, BlockTools]
 ) -> None:
     harvesters, farmer_service, _ = farmer_one_harvester
     harvester_service = harvesters[0]
diff --git a/chia/_tests/plot_sync/test_receiver.py b/chia/_tests/plot_sync/test_receiver.py
index 7920d128824e..ec3ec8383094 100644
--- a/chia/_tests/plot_sync/test_receiver.py
+++ b/chia/_tests/plot_sync/test_receiver.py
@@ -4,7 +4,7 @@
 import logging
 import random
 import time
-from typing import Any, Callable, List, Tuple, Type, Union
+from typing import Any, Callable, Union

 import pytest
 from chia_rs import G1Element
@@ -57,7 +57,7 @@ class SyncStepData:
     args: Any

     def __init__(
-        self, state: State, function: Callable[[_T_Streamable], Any], payload_type: Type[_T_Streamable], *args: Any
+        self, state: State, function: Callable[[_T_Streamable], Any], payload_type: type[_T_Streamable], *args: Any
     ) -> None:
         self.state = state
         self.function = function
@@ -89,7 +89,7 @@ def assert_error_response(plot_sync: Receiver, error_code: ErrorCodes) -> None:
     assert response.error.code == error_code.value


-def pre_function_validate(receiver: Receiver, data: Union[List[Plot], List[str]], expected_state: State) -> None:
+def pre_function_validate(receiver: Receiver, data: Union[list[Plot], list[str]], expected_state: State) -> None:
     if expected_state == State.loaded:
         for plot_info in data:
             assert type(plot_info) is Plot
@@ -108,7 +108,7 @@ def pre_function_validate(receiver: Receiver, data: Union[List[Plot], List[str]]
             assert path not in receiver.duplicates()


-def post_function_validate(receiver: Receiver, data: Union[List[Plot], List[str]], expected_state: State) -> None:
+def post_function_validate(receiver: Receiver, data: Union[list[Plot], list[str]], expected_state: State) -> None:
     if expected_state == State.loaded:
         for plot_info in data:
             assert type(plot_info) is Plot
@@ -161,7 +161,7 @@ async def run_sync_step(receiver: Receiver, sync_step: SyncStepData) -> None:
     assert receiver._last_sync.time_done == last_sync_time_before


-def plot_sync_setup(seeded_random: random.Random) -> Tuple[Receiver, List[SyncStepData]]:
+def plot_sync_setup(seeded_random: random.Random) -> tuple[Receiver, list[SyncStepData]]:
     harvester_connection = get_dummy_connection(NodeType.HARVESTER, bytes32.random(seeded_random))
     receiver = Receiver(harvester_connection, dummy_callback)  # type:ignore[arg-type]

@@ -188,7 +188,7 @@ def plot_sync_setup(seeded_random: random.Random) -> Tuple[Receiver, List[SyncSt
     receiver._total_effective_plot_size = int(
         sum(UI_ACTUAL_SPACE_CONSTANT_FACTOR * int(_expected_plot_size(plot.size)) for plot in receiver.plots().values())
     )
-    sync_steps: List[SyncStepData] = [
+    sync_steps: list[SyncStepData] = [
         SyncStepData(
             State.idle,
             receiver.sync_started,
diff --git a/chia/_tests/plot_sync/test_sync_simulated.py b/chia/_tests/plot_sync/test_sync_simulated.py
index c609c2e4b07d..736da9276af7 100644
--- a/chia/_tests/plot_sync/test_sync_simulated.py
+++ b/chia/_tests/plot_sync/test_sync_simulated.py
@@ -6,10 +6,11 @@
 import logging
 import random
 import time
+from collections.abc import AsyncIterator
 from dataclasses import dataclass, field
 from enum import Enum
 from pathlib import Path
-from typing import Any, AsyncIterator, Dict, List, Optional, Set, Tuple
+from typing import Any, Optional

 import pytest
 from chia_rs import G1Element
@@ -51,19 +52,19 @@ class TestData:
     plot_sync_sender: Sender
     plot_sync_receiver: Receiver
     event_loop: asyncio.AbstractEventLoop
-    plots: Dict[Path, PlotInfo] = field(default_factory=dict)
-    invalid: List[PlotInfo] = field(default_factory=list)
-    keys_missing: List[PlotInfo] = field(default_factory=list)
-    duplicates: List[PlotInfo] = field(default_factory=list)
+    plots: dict[Path, PlotInfo] = field(default_factory=dict)
+    invalid: list[PlotInfo] = field(default_factory=list)
+    keys_missing: list[PlotInfo] = field(default_factory=list)
+    duplicates: list[PlotInfo] = field(default_factory=list)

     async def run(
         self,
         *,
-        loaded: List[PlotInfo],
-        removed: List[PlotInfo],
-        invalid: List[PlotInfo],
-        keys_missing: List[PlotInfo],
-        duplicates: List[PlotInfo],
+        loaded: list[PlotInfo],
+        removed: list[PlotInfo],
+        invalid: list[PlotInfo],
+        keys_missing: list[PlotInfo],
+        duplicates: list[PlotInfo],
         initial: bool,
     ) -> None:
         for plot_info in loaded:
@@ -75,10 +76,10 @@ async def run(
         self.keys_missing = keys_missing
         self.duplicates = duplicates

-        removed_paths: List[Path] = [p.prover.get_filename() for p in removed] if removed is not None else []
-        invalid_dict: Dict[Path, int] = {p.prover.get_filename(): 0 for p in self.invalid}
-        keys_missing_set: Set[Path] = {p.prover.get_filename() for p in self.keys_missing}
-        duplicates_set: Set[str] = {p.prover.get_filename() for p in self.duplicates}
+        removed_paths: list[Path] = [p.prover.get_filename() for p in removed] if removed is not None else []
+        invalid_dict: dict[Path, int] = {p.prover.get_filename(): 0 for p in self.invalid}
+        keys_missing_set: set[Path] = {p.prover.get_filename() for p in self.keys_missing}
+        duplicates_set: set[str] = {p.prover.get_filename() for p in self.duplicates}

         # Inject invalid plots into `PlotManager` of the harvester so that the callback calls below can use them
         # to sync them to the farmer.
@@ -158,10 +159,10 @@ def validate_plot_sync(self) -> None:

 @dataclass
 class TestRunner:
-    test_data: List[TestData]
+    test_data: list[TestData]

     def __init__(
-        self, harvesters: List[Harvester], farmer: Farmer, event_loop: asyncio.events.AbstractEventLoop
+        self, harvesters: list[Harvester], farmer: Farmer, event_loop: asyncio.events.AbstractEventLoop
     ) -> None:
         self.test_data = []
         for harvester in harvesters:
@@ -179,11 +180,11 @@ async def run(
         self,
         index: int,
         *,
-        loaded: List[PlotInfo],
-        removed: List[PlotInfo],
-        invalid: List[PlotInfo],
-        keys_missing: List[PlotInfo],
-        duplicates: List[PlotInfo],
+        loaded: list[PlotInfo],
+        removed: list[PlotInfo],
+        invalid: list[PlotInfo],
+        keys_missing: list[PlotInfo],
+        duplicates: list[PlotInfo],
         initial: bool,
     ) -> None:
         await self.test_data[index].run(
@@ -241,7 +242,7 @@ async def _testable_process(

 @contextlib.asynccontextmanager
 async def create_test_runner(
-    harvester_services: List[HarvesterService],
+    harvester_services: list[HarvesterService],
     farmer_service: FarmerService,
     event_loop: asyncio.events.AbstractEventLoop,
 ) -> AsyncIterator[TestRunner]:
@@ -262,7 +263,7 @@ async def create_test_runner(
     yield TestRunner(harvesters, farmer, event_loop)


-def create_example_plots(count: int, seeded_random: random.Random) -> List[PlotInfo]:
+def create_example_plots(count: int, seeded_random: random.Random) -> list[PlotInfo]:
     @dataclass
     class DiskProver:
         file_name: str
@@ -296,7 +297,7 @@ def get_compression_level(self) -> uint8:

 @pytest.mark.anyio
 async def test_sync_simulated(
-    farmer_three_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_three_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools],
     event_loop: asyncio.events.AbstractEventLoop,
     seeded_random: random.Random,
 ) -> None:
@@ -376,7 +377,7 @@ async def test_sync_simulated(
 )
 @pytest.mark.anyio
 async def test_farmer_error_simulation(
-    farmer_one_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_one_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools],
     event_loop: asyncio.events.AbstractEventLoop,
     simulate_error: ErrorSimulation,
     seeded_random: random.Random,
@@ -402,7 +403,7 @@ async def test_farmer_error_simulation(
 @pytest.mark.parametrize("simulate_error", [ErrorSimulation.NonRecoverableError, ErrorSimulation.NotConnected])
 @pytest.mark.anyio
 async def test_sync_reset_cases(
-    farmer_one_harvester_not_started: Tuple[List[HarvesterService], FarmerService, BlockTools],
+    farmer_one_harvester_not_started: tuple[list[HarvesterService], FarmerService, BlockTools],
     event_loop: asyncio.events.AbstractEventLoop,
     simulate_error: ErrorSimulation,
     seeded_random: random.Random,
diff --git a/chia/_tests/plot_sync/util.py b/chia/_tests/plot_sync/util.py
index 305959ad6222..e59b110b6c3b 100644
--- a/chia/_tests/plot_sync/util.py
+++ b/chia/_tests/plot_sync/util.py
@@ -2,8 +2,9 @@

 import contextlib
 import time
+from collections.abc import AsyncIterator
 from dataclasses import dataclass
-from typing import AsyncIterator, Optional
+from typing import Optional

 from chia._tests.util.split_managers import SplitAsyncManager, split_async_manager
 from chia._tests.util.time_out_assert import time_out_assert
diff --git a/chia/_tests/plotting/test_plot_manager.py b/chia/_tests/plotting/test_plot_manager.py
index b4ad3877a120..6989a77b8d33 100644
--- a/chia/_tests/plotting/test_plot_manager.py
+++ b/chia/_tests/plotting/test_plot_manager.py
@@ -3,11 +3,12 @@
 import logging
 import sys
 import time
+from collections.abc import Iterator
 from dataclasses import dataclass, replace
 from os import unlink
 from pathlib import Path
 from shutil import copy, move
-from typing import Callable, Iterator, List, Optional, cast
+from typing import Callable, Optional, cast

 import pytest
 from chia_rs import G1Element
@@ -48,9 +49,9 @@ class MockPlotInfo:
 class Directory:
     path: Path
-    plots: List[Path]
+    plots: list[Path]

-    def __init__(self, path: Path, plots_origin: List[Path]):
+    def __init__(self, path: Path, plots_origin: list[Path]):
         self.path = path
         path.mkdir(parents=True, exist_ok=True)
         # Drop the existing files in the test directories
@@ -66,10 +67,10 @@ def __init__(self, path: Path, plots_origin: List[Path]):
     def __len__(self):
         return len(self.plots)

-    def plot_info_list(self) -> List[MockPlotInfo]:
+    def plot_info_list(self) -> list[MockPlotInfo]:
         return [MockPlotInfo(MockDiskProver(str(x))) for x in self.plots]

-    def path_list(self) -> List[Path]:
+    def path_list(self) -> list[Path]:
         return self.plots

     def drop(self, path: Path):
@@ -149,7 +150,7 @@ class Environment:
 def environment(tmp_path, bt) -> Iterator[Environment]:
     dir_1_count: int = 7
     dir_2_count: int = 3
-    plots: List[Path] = get_test_plots()
+    plots: list[Path] = get_test_plots()
     assert len(plots) >= dir_1_count + dir_2_count

     dir_1: Directory = Directory(tmp_path / "plots" / "1", plots[0:dir_1_count])
@@ -180,14 +181,14 @@ async def run_test_case(
     *,
     trigger: Callable,
     test_path: Path,
-    expect_loaded: List[MockPlotInfo],
-    expect_removed: List[Path],
+    expect_loaded: list[MockPlotInfo],
+    expect_removed: list[Path],
     expect_processed: int,
     expect_duplicates: int,
     expected_directories: int,
     expect_total_plots: int,
 ):
-    expected_result.loaded = cast(List[PlotInfo], expect_loaded)
+    expected_result.loaded = cast(list[PlotInfo], expect_loaded)
     expected_result.removed = expect_removed
     expected_result.processed = expect_processed
     trigger(env.root_path, str(test_path))
@@ -432,7 +433,7 @@ async def test_invalid_plots(environment):
 @pytest.mark.anyio
 async def test_keys_missing(environment: Environment) -> None:
     env: Environment = environment
-    not_in_keychain_plots: List[Path] = get_test_plots("not_in_keychain")
+    not_in_keychain_plots: list[Path] = get_test_plots("not_in_keychain")
     dir_not_in_keychain: Directory = Directory(env.root_path / "plots" / "not_in_keychain", not_in_keychain_plots)
     expected_result = PlotRefreshResult()
     # The plots in "not_in_keychain" directory have infinity g1 elements as farmer/pool key so they should be plots
@@ -584,7 +585,7 @@ def modify_cache_entry(index: int, additional_data: int, modify_memo: bool) -> s
         )
         return path

-    def assert_cache(expected: List[MockPlotInfo]) -> None:
+    def assert_cache(expected: list[MockPlotInfo]) -> None:
         test_cache = Cache(cache_path)
         assert len(test_cache) == 0
         test_cache.load()
diff --git a/chia/_tests/plotting/util.py b/chia/_tests/plotting/util.py
index ba32d24f1f65..608491b40d2d 100644
--- a/chia/_tests/plotting/util.py
+++ b/chia/_tests/plotting/util.py
@@ -1,12 +1,11 @@
 from __future__ import annotations

 from pathlib import Path
-from typing import List

 from chia.simulator.block_tools import get_plot_dir


-def get_test_plots(sub_dir: str = "") -> List[Path]:
+def get_test_plots(sub_dir: str = "") -> list[Path]:
     path = get_plot_dir()
     if sub_dir != "":
         path = path / sub_dir
diff --git a/chia/_tests/pools/test_pool_puzzles_lifecycle.py b/chia/_tests/pools/test_pool_puzzles_lifecycle.py
index eb1548a01fd6..577469ea1cd8 100644
--- a/chia/_tests/pools/test_pool_puzzles_lifecycle.py
+++ b/chia/_tests/pools/test_pool_puzzles_lifecycle.py
@@ -1,7 +1,6 @@
 from __future__ import annotations

 import copy
-from typing import List
 from unittest import TestCase

 import pytest
@@ -205,7 +204,7 @@ def test_pool_lifecycle(self):
         assert launcher_id_to_p2_puzzle_hash(launcher_id, DELAY_TIME, DELAY_PH) == p2_singleton_ph
         assert get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle(p2_singleton_puz) == (DELAY_TIME, DELAY_PH)
         coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
-        coin_sols: List[CoinSpend] = create_absorb_spend(
+        coin_sols: list[CoinSpend] = create_absorb_spend(
             launcher_coinsol,
             pool_state,
             launcher_coin,
@@ -226,7 +225,7 @@ def test_pool_lifecycle(self):
                 coin_sols,
             )
         )[0]
-        coin_sols: List[CoinSpend] = create_absorb_spend(
+        coin_sols: list[CoinSpend] = create_absorb_spend(
             last_coinsol,
             pool_state,
             launcher_coin,
@@ -332,7 +331,7 @@ def test_pool_lifecycle(self):
         # create the farming reward
         coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
         # generate relevant coin solutions
-        coin_sols: List[CoinSpend] = create_absorb_spend(
+        coin_sols: list[CoinSpend] = create_absorb_spend(
             travel_coinsol,
             target_pool_state,
             launcher_coin,
@@ -383,7 +382,7 @@ def test_pool_lifecycle(self):
         time = CoinTimestamp(20000000, 10005)
         # create the farming reward
         coin_db.farm_coin(p2_singleton_ph, time, 1750000000000)
-        coin_sols: List[CoinSpend] = create_absorb_spend(
+        coin_sols: list[CoinSpend] = create_absorb_spend(
             return_coinsol,
             pool_state,
             launcher_coin,
diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py
index 29ff4704927b..f0e0e4bf9587 100644
--- a/chia/_tests/pools/test_pool_rpc.py
+++ b/chia/_tests/pools/test_pool_rpc.py
@@ -4,10 +4,11 @@
 import contextlib
 import logging
 import tempfile
+from collections.abc import AsyncIterator
 from dataclasses import dataclass
 from pathlib import Path
 from shutil import rmtree
-from typing import Any, AsyncIterator, Dict, List, Tuple
+from typing import Any

 import pytest
@@ -91,7 +92,7 @@ def fee(trusted: bool) -> uint64:
     return uint64(0)


-OneWalletNodeAndRpc = Tuple[WalletRpcClient, Any, FullNodeSimulator, int, BlockTools]
+OneWalletNodeAndRpc = tuple[WalletRpcClient, Any, FullNodeSimulator, int, BlockTools]


 @pytest.fixture(scope="function")
@@ -129,12 +130,12 @@ async def one_wallet_node_and_rpc(
     await client.await_closed()


-Setup = Tuple[FullNodeSimulator, WalletNode, bytes32, int, WalletRpcClient]
+Setup = tuple[FullNodeSimulator, WalletNode, bytes32, int, WalletRpcClient]


 @pytest.fixture(scope="function")
 async def setup(
-    one_wallet_and_one_simulator_services: Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools],
+    one_wallet_and_one_simulator_services: tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools],
     trusted: bool,
     self_hostname: str,
 ) -> AsyncIterator[Setup]:
@@ -216,8 +217,8 @@ async def test_create_new_pool_wallet_self_farm(
     assert status.current.relative_lock_height == 0
     assert status.current.version == 1

     # Check that config has been written properly
-    full_config: Dict[str, Any] = load_config(wallet.wallet_state_manager.root_path, "config.yaml")
-    pool_list: List[Dict[str, Any]] = full_config["pool"]["pool_list"]
+    full_config: dict[str, Any] = load_config(wallet.wallet_state_manager.root_path, "config.yaml")
+    pool_list: list[dict[str, Any]] = full_config["pool"]["pool_list"]
     assert len(pool_list) == 1
     pool_config = pool_list[0]
     assert (
@@ -269,8 +270,8 @@ async def
test_create_new_pool_wallet_farm_to_pool( assert status.current.relative_lock_height == 10 assert status.current.version == 1 # Check that config has been written properly - full_config: Dict[str, Any] = load_config(wallet.wallet_state_manager.root_path, "config.yaml") - pool_list: List[Dict[str, Any]] = full_config["pool"]["pool_list"] + full_config: dict[str, Any] = load_config(wallet.wallet_state_manager.root_path, "config.yaml") + pool_list: list[dict[str, Any]] = full_config["pool"]["pool_list"] assert len(pool_list) == 1 pool_config = pool_list[0] assert ( @@ -333,7 +334,7 @@ async def pw_created(check_wallet_id: int) -> bool: assert status_3.current.state == PoolSingletonState.SELF_POOLING.value full_config = load_config(wallet.wallet_state_manager.root_path, "config.yaml") - pool_list: List[Dict[str, Any]] = full_config["pool"]["pool_list"] + pool_list: list[dict[str, Any]] = full_config["pool"]["pool_list"] assert len(pool_list) == 2 assert len(await wallet_node.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(2)) == 0 @@ -438,7 +439,7 @@ async def test_absorb_self( assert bal["confirmed_wallet_balance"] == 2 * 1_750_000_000_000 # Claim 2 * 1.75, and farm a new 1.75 - absorb_txs: List[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee)))["transactions"] + absorb_txs: list[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee)))["transactions"] await full_node_api.wait_transaction_records_entered_mempool(records=absorb_txs) await full_node_api.farm_blocks_to_puzzlehash(count=2, farm_to=our_ph, guarantee_transaction_blocks=True) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) @@ -450,7 +451,7 @@ async def test_absorb_self( assert bal["confirmed_wallet_balance"] == 1 * 1_750_000_000_000 # Claim another 1.75 - absorb_txs1: List[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee)))["transactions"] + absorb_txs1: list[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee)))["transactions"] await full_node_api.wait_transaction_records_entered_mempool(records=absorb_txs1) @@ -534,7 +535,7 @@ async def test_absorb_self_multiple_coins( assert bal["confirmed_wallet_balance"] == pool_expected_confirmed_balance # Claim - absorb_txs: List[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee), 1))["transactions"] + absorb_txs: list[TransactionRecord] = (await client.pw_absorb_rewards(2, uint64(fee), 1))["transactions"] await full_node_api.process_transaction_records(records=absorb_txs) main_expected_confirmed_balance -= fee main_expected_confirmed_balance += 1_750_000_000_000 @@ -603,7 +604,7 @@ async def farming_to_pool() -> bool: # Claim block_count * 1.75 ret = await client.pw_absorb_rewards(2, uint64(fee)) - absorb_txs: List[TransactionRecord] = ret["transactions"] + absorb_txs: list[TransactionRecord] = ret["transactions"] if fee == 0: assert ret["fee_transaction"] is None else: @@ -715,7 +716,7 @@ async def test_self_pooling_to_pooling(self, setup: Setup, fee: uint64, self_hos assert status_2.target is None await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - join_pool: Dict[str, Any] = await client.pw_join_pool( + join_pool: dict[str, Any] = await client.pw_join_pool( wallet_id, pool_ph, "https://pool.example.com", @@ -727,7 +728,7 @@ async def test_self_pooling_to_pooling(self, setup: Setup, fee: uint64, self_hos assert join_pool_tx is not None await full_node_api.wait_transaction_records_entered_mempool(records=[join_pool_tx]) - join_pool_2: 
Dict[str, Any] = await client.pw_join_pool( + join_pool_2: dict[str, Any] = await client.pw_join_pool( wallet_id_2, pool_ph, "https://pool.example.com", uint32(10), uint64(fee) ) assert join_pool_2["success"] @@ -820,7 +821,7 @@ async def status_is_farming_to_pool() -> bool: await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - leave_pool_tx: Dict[str, Any] = await client.pw_self_pool(wallet_id, uint64(fee)) + leave_pool_tx: dict[str, Any] = await client.pw_self_pool(wallet_id, uint64(fee)) assert leave_pool_tx["transaction"].wallet_id == wallet_id assert leave_pool_tx["transaction"].amount == 1 await full_node_api.wait_transaction_records_entered_mempool(records=leave_pool_tx["transactions"]) @@ -965,7 +966,7 @@ async def status_is_farming_to_pool() -> bool: assert pw_info.current.pool_url == "https://pool-a.org" assert pw_info.current.relative_lock_height == 5 - join_pool_txs: List[TransactionRecord] = ( + join_pool_txs: list[TransactionRecord] = ( await client.pw_join_pool( wallet_id, pool_b_ph, diff --git a/chia/_tests/pools/test_pool_wallet.py b/chia/_tests/pools/test_pool_wallet.py index e2d45a9db7ed..c6aabbe8d3db 100644 --- a/chia/_tests/pools/test_pool_wallet.py +++ b/chia/_tests/pools/test_pool_wallet.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from pathlib import Path -from typing import Any, List, Optional, cast +from typing import Any, Optional, cast from unittest.mock import MagicMock import pytest @@ -56,7 +56,7 @@ async def test_update_pool_config_new_config(monkeypatch: Any) -> None: Test that PoolWallet can create a new pool config """ - updated_configs: List[MockPoolWalletConfig] = [] + updated_configs: list[MockPoolWalletConfig] = [] payout_instructions_ph = rand_hash() launcher_id: bytes32 = rand_hash() p2_singleton_puzzle_hash: bytes32 = rand_hash() @@ -75,14 +75,14 @@ async def test_update_pool_config_new_config(monkeypatch: Any) -> None: ) # No config data - def mock_load_pool_config(root_path: Path) -> List[MockPoolWalletConfig]: + def mock_load_pool_config(root_path: Path) -> list[MockPoolWalletConfig]: return [] monkeypatch.setattr("chia.pools.pool_wallet.load_pool_config", mock_load_pool_config) # Mock pool_config.update_pool_config to capture the updated configs async def mock_pool_config_update_pool_config( - root_path: Path, pool_config_list: List[MockPoolWalletConfig] + root_path: Path, pool_config_list: list[MockPoolWalletConfig] ) -> None: nonlocal updated_configs updated_configs = pool_config_list @@ -121,7 +121,7 @@ async def test_update_pool_config_existing_payout_instructions(monkeypatch: Any) Test that PoolWallet will retain existing payout_instructions when updating the pool config. 
""" - updated_configs: List[MockPoolWalletConfig] = [] + updated_configs: list[MockPoolWalletConfig] = [] payout_instructions_ph = rand_hash() launcher_id: bytes32 = rand_hash() p2_singleton_puzzle_hash: bytes32 = rand_hash() @@ -157,7 +157,7 @@ async def test_update_pool_config_existing_payout_instructions(monkeypatch: Any) ) # No config data - def mock_load_pool_config(root_path: Path) -> List[MockPoolWalletConfig]: + def mock_load_pool_config(root_path: Path) -> list[MockPoolWalletConfig]: nonlocal existing_config return [existing_config] @@ -165,7 +165,7 @@ def mock_load_pool_config(root_path: Path) -> List[MockPoolWalletConfig]: # Mock pool_config.update_pool_config to capture the updated configs async def mock_pool_config_update_pool_config( - root_path: Path, pool_config_list: List[MockPoolWalletConfig] + root_path: Path, pool_config_list: list[MockPoolWalletConfig] ) -> None: nonlocal updated_configs updated_configs = pool_config_list diff --git a/chia/_tests/pools/test_wallet_pool_store.py b/chia/_tests/pools/test_wallet_pool_store.py index 88c971cb11cd..852c6d941008 100644 --- a/chia/_tests/pools/test_wallet_pool_store.py +++ b/chia/_tests/pools/test_wallet_pool_store.py @@ -2,7 +2,7 @@ import random from dataclasses import dataclass, field -from typing import Dict, List, Optional +from typing import Optional import pytest from clvm_tools import binutils @@ -36,7 +36,7 @@ def make_child_solution( return sol -async def assert_db_spends(store: WalletPoolStore, wallet_id: int, spends: List[CoinSpend]) -> None: +async def assert_db_spends(store: WalletPoolStore, wallet_id: int, spends: list[CoinSpend]) -> None: db_spends = await store.get_spends_for_wallet(wallet_id) assert len(db_spends) == len(spends) for spend, (_, db_spend) in zip(spends, db_spends): @@ -46,7 +46,7 @@ async def assert_db_spends(store: WalletPoolStore, wallet_id: int, spends: List[ @dataclass class DummySpends: seeded_random: random.Random - spends_per_wallet: Dict[int, List[CoinSpend]] = field(default_factory=dict) + spends_per_wallet: dict[int, list[CoinSpend]] = field(default_factory=dict) def generate(self, wallet_id: int, count: int) -> None: current = self.spends_per_wallet.setdefault(wallet_id, []) diff --git a/chia/_tests/process_junit.py b/chia/_tests/process_junit.py index fb1388ba62a6..e348f374c99c 100644 --- a/chia/_tests/process_junit.py +++ b/chia/_tests/process_junit.py @@ -7,7 +7,7 @@ from dataclasses import dataclass, field from pathlib import Path from statistics import StatisticsError, mean, stdev -from typing import Any, Dict, List, Optional, TextIO, Tuple, Type, final +from typing import Any, Optional, TextIO, final import click import lxml.etree @@ -15,22 +15,22 @@ from chia._tests.util.misc import BenchmarkData, DataTypeProtocol, TestId from chia._tests.util.time_out_assert import TimeOutAssertData -supported_data_types: List[Type[DataTypeProtocol]] = [TimeOutAssertData, BenchmarkData] -supported_data_types_by_tag: Dict[str, Type[DataTypeProtocol]] = {cls.tag: cls for cls in supported_data_types} +supported_data_types: list[type[DataTypeProtocol]] = [TimeOutAssertData, BenchmarkData] +supported_data_types_by_tag: dict[str, type[DataTypeProtocol]] = {cls.tag: cls for cls in supported_data_types} @final @dataclass(frozen=True, order=True) class Result: file_path: Path - test_path: Tuple[str, ...] - ids: Tuple[str, ...] + test_path: tuple[str, ...] + ids: tuple[str, ...] label: str line: int = field(compare=False) - durations: Tuple[float, ...] 
= field(compare=False) + durations: tuple[float, ...] = field(compare=False) limit: float = field(compare=False) - def marshal(self) -> Dict[str, Any]: + def marshal(self) -> dict[str, Any]: return { "file_path": self.file_path.as_posix(), "test_path": self.test_path, @@ -133,7 +133,7 @@ def main( tree = lxml.etree.parse(xml_file) root = tree.getroot() - cases_by_test_id: defaultdict[TestId, List[lxml.etree.Element]] = defaultdict(list) + cases_by_test_id: defaultdict[TestId, list[lxml.etree.Element]] = defaultdict(list) for suite in root.findall("testsuite"): for case in suite.findall("testcase"): if case.find("skipped") is not None: @@ -145,7 +145,7 @@ def main( ) cases_by_test_id[test_id].append(case) - data_by_event_id: defaultdict[EventId, List[DataTypeProtocol]] = defaultdict(list) + data_by_event_id: defaultdict[EventId, list[DataTypeProtocol]] = defaultdict(list) for test_id, cases in cases_by_test_id.items(): for case in cases: for property in case.findall(f"properties/property[@name='{tag}']"): @@ -154,7 +154,7 @@ def main( event_id = EventId(test_id=test_id, tag=tag, line=data.line, path=data.path, label=data.label) data_by_event_id[event_id].append(data) - results: List[Result] = [] + results: list[Result] = [] for event_id, datas in data_by_event_id.items(): [limit] = {data.limit for data in datas} results.append( @@ -196,7 +196,7 @@ def output_benchmark( output: TextIO, percent_margin: int, randomoji: bool, - results: List[Result], + results: list[Result], ) -> None: if not markdown: for result in sorted(results): @@ -273,7 +273,7 @@ def output_time_out_assert( output: TextIO, percent_margin: int, randomoji: bool, - results: List[Result], + results: list[Result], ) -> None: if not markdown: for result in sorted(results): diff --git a/chia/_tests/rpc/test_rpc_client.py b/chia/_tests/rpc/test_rpc_client.py index ad86a8da7646..937e604e8840 100644 --- a/chia/_tests/rpc/test_rpc_client.py +++ b/chia/_tests/rpc/test_rpc_client.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import AsyncIterator from dataclasses import dataclass from pathlib import Path -from typing import Any, AsyncIterator, Dict, Optional +from typing import Any, Optional import pytest @@ -15,7 +16,7 @@ class InvalidCreateCase: id: str root_path: Optional[Path] = None - net_config: Optional[Dict[str, Any]] = None + net_config: Optional[dict[str, Any]] = None marks: Marks = () diff --git a/chia/_tests/simulation/test_simulation.py b/chia/_tests/simulation/test_simulation.py index e0e9967cf9ef..5557e2465c75 100644 --- a/chia/_tests/simulation/test_simulation.py +++ b/chia/_tests/simulation/test_simulation.py @@ -2,7 +2,7 @@ import importlib.metadata import json -from typing import AsyncIterator, List, Tuple +from collections.abc import AsyncIterator import aiohttp import pytest @@ -186,7 +186,7 @@ async def verify_daemon_connection(daemon: WebSocketServer, service: str) -> boo @pytest.mark.anyio async def test_simulator_auto_farm_and_get_coins( self, - two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + two_wallet_nodes: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], self_hostname: str, ) -> None: num_blocks = 2 @@ -485,7 +485,7 @@ async def test_process_transactions( async def test_create_coins_with_invalid_amounts_raises( self, self_hostname: str, - amounts: List[uint64], + amounts: list[uint64], simulator_and_wallet: OldSimulatorsAndWallets, ) -> None: [[full_node_api], [[wallet_node, wallet_server]], _] 
= simulator_and_wallet diff --git a/chia/_tests/simulation/test_simulator.py b/chia/_tests/simulation/test_simulator.py index 0c7ab7ccbb5a..8cf569abcdab 100644 --- a/chia/_tests/simulation/test_simulator.py +++ b/chia/_tests/simulation/test_simulator.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import pytest from chia._tests.util.setup_nodes import OldSimulatorsAndWallets @@ -21,7 +19,7 @@ async def test_simulation_farm_blocks_to_puzzlehash( count: int, guarantee_transaction_blocks: bool, - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: [[full_node_api], _, _] = simulator_and_wallet @@ -41,7 +39,7 @@ async def test_simulation_farm_blocks_to_puzzlehash( @pytest.mark.parametrize(argnames="count", argvalues=[0, 1, 2, 5, 10]) async def test_simulation_farm_blocks_to_wallet( count: int, - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: [[full_node_api], [[wallet_node, wallet_server]], _] = simulator_and_wallet @@ -79,7 +77,7 @@ async def test_simulation_farm_blocks_to_wallet( async def test_simulation_farm_rewards_to_wallet( amount: int, coin_count: int, - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: [[full_node_api], [[wallet_node, wallet_server]], _] = simulator_and_wallet @@ -107,7 +105,7 @@ async def test_simulation_farm_rewards_to_wallet( @pytest.mark.anyio async def test_wait_transaction_records_entered_mempool( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: repeats = 50 tx_amount = 1 @@ -143,7 +141,7 @@ async def test_wait_transaction_records_entered_mempool( @pytest.mark.anyio async def test_process_transaction_records( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: repeats = 50 tx_amount = 1 @@ -187,7 +185,7 @@ async def test_process_transaction_records( ], ) async def test_create_coins_with_amounts( - self_hostname: str, amounts: List[uint64], simulator_and_wallet: OldSimulatorsAndWallets + self_hostname: str, amounts: list[uint64], simulator_and_wallet: OldSimulatorsAndWallets ) -> None: [[full_node_api], [[wallet_node, wallet_server]], _] = simulator_and_wallet await wallet_server.start_client(PeerInfo(self_hostname, full_node_api.server.get_port()), None) @@ -213,8 +211,8 @@ async def test_create_coins_with_amounts( ids=lambda amounts: ", ".join(str(amount) for amount in amounts), ) async def test_create_coins_with_invalid_amounts_raises( - amounts: List[int], - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + amounts: list[int], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: [[full_node_api], [[wallet_node, wallet_server]], _] = simulator_and_wallet diff --git 
a/chia/_tests/simulation/test_start_simulator.py b/chia/_tests/simulation/test_start_simulator.py index f04782f95848..fa2bf9444077 100644 --- a/chia/_tests/simulation/test_start_simulator.py +++ b/chia/_tests/simulation/test_start_simulator.py @@ -1,8 +1,9 @@ from __future__ import annotations import asyncio +from collections.abc import AsyncGenerator from pathlib import Path -from typing import Any, AsyncGenerator, Dict, Tuple +from typing import Any import pytest @@ -28,13 +29,13 @@ class TestStartSimulator: @pytest.fixture(scope="function") async def get_chia_simulator( self, tmp_path: Path, empty_keyring: Keychain - ) -> AsyncGenerator[Tuple[FullNodeSimulator, Path, Dict[str, Any], str, int, Keychain], None]: + ) -> AsyncGenerator[tuple[FullNodeSimulator, Path, dict[str, Any], str, int, Keychain], None]: async for simulator_args in get_full_chia_simulator(chia_root=tmp_path, keychain=empty_keyring): yield simulator_args @pytest.mark.anyio async def test_start_simulator( - self, get_chia_simulator: Tuple[FullNodeSimulator, Path, Dict[str, Any], str, int, Keychain] + self, get_chia_simulator: tuple[FullNodeSimulator, Path, dict[str, Any], str, int, Keychain] ) -> None: simulator, root_path, config, mnemonic, fingerprint, keychain = get_chia_simulator ph_1: bytes32 = get_puzzle_hash_from_key(keychain=keychain, fingerprint=fingerprint, key_id=1) diff --git a/chia/_tests/timelord/test_new_peak.py b/chia/_tests/timelord/test_new_peak.py index 9d605d3c350b..1891e0f0cc42 100644 --- a/chia/_tests/timelord/test_new_peak.py +++ b/chia/_tests/timelord/test_new_peak.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional import pytest @@ -27,7 +27,7 @@ class TestNewPeak: @pytest.mark.anyio async def test_timelord_new_peak_basic( - self, bt: BlockTools, timelord: Tuple[TimelordAPI, ChiaServer], default_1000_blocks: List[FullBlock] + self, bt: BlockTools, timelord: tuple[TimelordAPI, ChiaServer], default_1000_blocks: list[FullBlock] ) -> None: async with create_blockchain(bt.constants, 2) as (b1, db_wrapper1): async with create_blockchain(bt.constants, 2) as (b2, db_wrapper2): @@ -65,7 +65,7 @@ async def test_timelord_new_peak_basic( @pytest.mark.anyio async def test_timelord_new_peak_unfinished_not_orphaned( - self, bt: BlockTools, timelord: Tuple[TimelordAPI, ChiaServer], default_1000_blocks: List[FullBlock] + self, bt: BlockTools, timelord: tuple[TimelordAPI, ChiaServer], default_1000_blocks: list[FullBlock] ) -> None: async with create_blockchain(bt.constants, 2) as (b1, db_wrapper1): timelord_api, _ = timelord @@ -116,9 +116,9 @@ async def test_timelord_new_peak_unfinished_not_orphaned( @pytest.mark.anyio async def test_timelord_new_peak_unfinished_orphaned( self, - one_node: Tuple[List[FullNodeService], List[FullNodeSimulator], BlockTools], - timelord: Tuple[TimelordAPI, ChiaServer], - default_1000_blocks: List[FullBlock], + one_node: tuple[list[FullNodeService], list[FullNodeSimulator], BlockTools], + timelord: tuple[TimelordAPI, ChiaServer], + default_1000_blocks: list[FullBlock], ) -> None: [full_node_service], _, bt = one_node full_node = full_node_service._node @@ -225,7 +225,7 @@ async def test_timelord_new_peak_unfinished_orphaned( @pytest.mark.anyio async def test_timelord_new_peak_unfinished_orphaned_overflow( - self, bt: BlockTools, timelord: Tuple[TimelordAPI, ChiaServer], default_1000_blocks: List[FullBlock] + self, bt: BlockTools, timelord: tuple[TimelordAPI, ChiaServer], default_1000_blocks: list[FullBlock] ) -> 
None: async with create_blockchain(bt.constants, 2) as (b1, db_wrapper1): async with create_blockchain(bt.constants, 2) as (b2, db_wrapper2): @@ -286,7 +286,7 @@ async def test_timelord_new_peak_unfinished_orphaned_overflow( @pytest.mark.anyio async def test_timelord_new_peak_unfinished_eos( - self, bt: BlockTools, timelord: Tuple[TimelordAPI, ChiaServer], default_1000_blocks: List[FullBlock] + self, bt: BlockTools, timelord: tuple[TimelordAPI, ChiaServer], default_1000_blocks: list[FullBlock] ) -> None: async with create_blockchain(bt.constants, 2) as (b1, db_wrapper1): async with create_blockchain(bt.constants, 2) as (b2, db_wrapper2): @@ -372,11 +372,11 @@ async def get_rc_prev(blockchain: Blockchain, block: FullBlock) -> bytes32: return rc_prev -def get_recent_reward_challenges(blockchain: Blockchain) -> List[Tuple[bytes32, uint128]]: +def get_recent_reward_challenges(blockchain: Blockchain) -> list[tuple[bytes32, uint128]]: peak = blockchain.get_peak() if peak is None: return [] - recent_rc: List[Tuple[bytes32, uint128]] = [] + recent_rc: list[tuple[bytes32, uint128]] = [] curr: Optional[BlockRecord] = peak while curr is not None and len(recent_rc) < 2 * blockchain.constants.MAX_SUB_SLOT_BLOCKS: if curr != peak: diff --git a/chia/_tests/tools/test_run_block.py b/chia/_tests/tools/test_run_block.py index e65a3948e331..458accc11b63 100644 --- a/chia/_tests/tools/test_run_block.py +++ b/chia/_tests/tools/test_run_block.py @@ -2,7 +2,6 @@ import json from pathlib import Path -from typing import List from chia._tests.util.run_block import run_json_block from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -41,7 +40,7 @@ ) -def find_retirement(tocheck: List[ConditionWithArgs]) -> bool: +def find_retirement(tocheck: list[ConditionWithArgs]) -> bool: for c in tocheck: if c.opcode != ConditionOpcode.CREATE_COIN: continue diff --git a/chia/_tests/tools/test_virtual_project.py b/chia/_tests/tools/test_virtual_project.py index 1b4bc5d37d25..8d18e5ac6f47 100644 --- a/chia/_tests/tools/test_virtual_project.py +++ b/chia/_tests/tools/test_virtual_project.py @@ -2,7 +2,7 @@ import textwrap from pathlib import Path -from typing import Any, Callable, Dict, List +from typing import Any, Callable import click import pytest @@ -213,7 +213,7 @@ def test_print_dependency_graph(chia_package_structure: Path) -> None: # Mock the build_dependency_graph function to control its output -def mock_build_dependency_graph(dir_params: DirectoryParameters) -> Dict[Path, List[Path]]: +def mock_build_dependency_graph(dir_params: DirectoryParameters) -> dict[Path, list[Path]]: return { Path("/path/to/package1/module1.py"): [ Path("/path/to/package2/module2.py"), @@ -284,7 +284,7 @@ def prepare_mocks2(monkeypatch: pytest.MonkeyPatch) -> None: def test_cycle_detection(prepare_mocks2: None) -> None: # Example graph with a simple cycle - graph: Dict[Path, List[Path]] = { + graph: dict[Path, list[Path]] = { Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], # Cycle here Path("/path/to/package3/module3.py"): [], @@ -414,7 +414,7 @@ def test_excluded_paths_handling(prepare_mocks2: None) -> None: def test_ignore_cycles_in_specific_packages(prepare_mocks2: None) -> None: - graph: Dict[Path, List[Path]] = { + graph: dict[Path, list[Path]] = { Path("/path/to/package1/module1.py"): [Path("/path/to/package2/module2.py")], Path("/path/to/package2/module2.py"): [Path("/path/to/package3/module3.py")], 
Path("/path/to/package3/module3.py"): [], @@ -489,8 +489,8 @@ def sample_function(config: Config) -> None: # Helper function to create a temporary YAML configuration file @pytest.fixture -def create_yaml_config(tmp_path: Path) -> Callable[[Dict[str, Any]], Path]: - def _create_yaml_config(content: Dict[str, Any]) -> Path: +def create_yaml_config(tmp_path: Path) -> Callable[[dict[str, Any]], Path]: + def _create_yaml_config(content: dict[str, Any]) -> Path: path = tmp_path / "config.yaml" with open(path, "w") as f: yaml.dump(content, f) @@ -499,7 +499,7 @@ def _create_yaml_config(content: Dict[str, Any]) -> Path: return _create_yaml_config -def test_config_with_yaml(create_yaml_config: Callable[[Dict[str, Any]], Path]) -> None: +def test_config_with_yaml(create_yaml_config: Callable[[dict[str, Any]], Path]) -> None: # Create a temporary YAML configuration file yaml_config = { "exclude_paths": ["path/to/exclude"], diff --git a/chia/_tests/util/benchmarks.py b/chia/_tests/util/benchmarks.py index d80a0a0a4a76..e3ddd72da7a2 100644 --- a/chia/_tests/util/benchmarks.py +++ b/chia/_tests/util/benchmarks.py @@ -1,7 +1,6 @@ from __future__ import annotations import random -from typing import Tuple import importlib_resources from chia_rs import AugSchemeMPL, ClassgroupElement, Coin, G1Element, G2Element, VDFInfo, VDFProof @@ -24,7 +23,7 @@ clvm_generator = clvm_generator_bin_path.read_bytes() -def rewards(height: uint32) -> Tuple[Coin, Coin]: +def rewards(height: uint32) -> tuple[Coin, Coin]: farmer_coin = create_farmer_coin(height, ph, uint64(250000000), DEFAULT_CONSTANTS.GENESIS_CHALLENGE) pool_coin = create_pool_coin(height, ph, uint64(1750000000), DEFAULT_CONSTANTS.GENESIS_CHALLENGE) return farmer_coin, pool_coin diff --git a/chia/_tests/util/blockchain.py b/chia/_tests/util/blockchain.py index 356b55de41c4..e5683db6df85 100644 --- a/chia/_tests/util/blockchain.py +++ b/chia/_tests/util/blockchain.py @@ -3,8 +3,9 @@ import contextlib import os import pickle +from collections.abc import AsyncIterator from pathlib import Path -from typing import AsyncIterator, List, Optional, Tuple +from typing import Optional from chia.consensus.blockchain import Blockchain from chia.consensus.constants import ConsensusConstants @@ -19,7 +20,7 @@ @contextlib.asynccontextmanager async def create_blockchain( constants: ConsensusConstants, db_version: int -) -> AsyncIterator[Tuple[Blockchain, DBWrapper2]]: +) -> AsyncIterator[tuple[Blockchain, DBWrapper2]]: db_uri = generate_in_memory_db_uri() async with DBWrapper2.managed(database=db_uri, uri=True, reader_count=1, db_version=db_version) as wrapper: coin_store = await CoinStore.create(wrapper) @@ -43,11 +44,11 @@ def persistent_blocks( normalized_to_identity_icc_eos: bool = False, normalized_to_identity_cc_sp: bool = False, normalized_to_identity_cc_ip: bool = False, - block_list_input: Optional[List[FullBlock]] = None, + block_list_input: Optional[list[FullBlock]] = None, time_per_block: Optional[float] = None, dummy_block_references: bool = False, include_transactions: bool = False, -) -> List[FullBlock]: +) -> list[FullBlock]: # try loading from disc, if not create new blocks.db file # TODO hash fixtures.py and blocktool.py, add to path, delete if the files changed if block_list_input is None: @@ -65,8 +66,8 @@ def persistent_blocks( print(f"File found at: {file_path}") try: bytes_list = file_path.read_bytes() - block_bytes_list: List[bytes] = pickle.loads(bytes_list) - blocks: List[FullBlock] = [] + block_bytes_list: list[bytes] = pickle.loads(bytes_list) + 
blocks: list[FullBlock] = [] for block_bytes in block_bytes_list: blocks.append(FullBlock.from_bytes_unchecked(block_bytes)) if len(blocks) == num_of_blocks + len(block_list_input): @@ -101,7 +102,7 @@ def new_test_db( seed: bytes, empty_sub_slots: int, bt: BlockTools, - block_list_input: List[FullBlock], + block_list_input: list[FullBlock], time_per_block: Optional[float], *, normalized_to_identity_cc_eos: bool = False, # CC_EOS, @@ -110,9 +111,9 @@ def new_test_db( normalized_to_identity_cc_ip: bool = False, # CC_IP dummy_block_references: bool = False, include_transactions: bool = False, -) -> List[FullBlock]: +) -> list[FullBlock]: print(f"create {path} with {num_of_blocks} blocks with ") - blocks: List[FullBlock] = bt.get_consecutive_blocks( + blocks: list[FullBlock] = bt.get_consecutive_blocks( num_of_blocks, block_list_input=block_list_input, time_per_block=time_per_block, @@ -125,7 +126,7 @@ def new_test_db( dummy_block_references=dummy_block_references, include_transactions=include_transactions, ) - block_bytes_list: List[bytes] = [] + block_bytes_list: list[bytes] = [] for block in blocks: block_bytes_list.append(bytes(block)) bytes_fn = pickle.dumps(block_bytes_list) diff --git a/chia/_tests/util/blockchain_mock.py b/chia/_tests/util/blockchain_mock.py index 1f4c535e5030..cad13a5eeba5 100644 --- a/chia/_tests/util/blockchain_mock.py +++ b/chia/_tests/util/blockchain_mock.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Set, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast from chia.consensus.block_record import BlockRecord from chia.types.blockchain_format.sized_bytes import bytes32 @@ -21,10 +21,10 @@ class BlockchainMock: def __init__( self, - blocks: Dict[bytes32, BlockRecord], - headers: Optional[Dict[bytes32, HeaderBlock]] = None, - height_to_hash: Optional[Dict[uint32, bytes32]] = None, - sub_epoch_summaries: Optional[Dict[uint32, SubEpochSummary]] = None, + blocks: dict[bytes32, BlockRecord], + headers: Optional[dict[bytes32, HeaderBlock]] = None, + height_to_hash: Optional[dict[uint32, bytes32]] = None, + sub_epoch_summaries: Optional[dict[uint32, SubEpochSummary]] = None, ): if sub_epoch_summaries is None: sub_epoch_summaries = {} @@ -36,7 +36,7 @@ def __init__( self._headers = headers self._height_to_hash = height_to_hash self._sub_epoch_summaries = sub_epoch_summaries - self._sub_epoch_segments: Dict[bytes32, SubEpochSegments] = {} + self._sub_epoch_segments: dict[bytes32, SubEpochSegments] = {} self.log = logging.getLogger(__name__) def get_peak(self) -> Optional[BlockRecord]: @@ -56,7 +56,7 @@ def height_to_block_record(self, height: uint32, check_db: bool = False) -> Bloc return self.block_record(header_hash) - def get_ses_heights(self) -> List[uint32]: + def get_ses_heights(self) -> list[uint32]: return sorted(self._sub_epoch_summaries.keys()) def get_ses(self, height: uint32) -> SubEpochSummary: @@ -78,11 +78,11 @@ def contains_height(self, height: uint32) -> bool: async def warmup(self, fork_point: uint32) -> None: return - async def get_block_records_in_range(self, start: int, stop: int) -> Dict[bytes32, BlockRecord]: + async def get_block_records_in_range(self, start: int, stop: int) -> dict[bytes32, BlockRecord]: return self._block_records - async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]: - block_records: List[BlockRecord] = [] + async def get_block_records_at(self, heights: list[uint32]) -> list[BlockRecord]: + 
block_records: list[BlockRecord] = [] for height in heights: block_records.append(self.height_to_block_record(height)) return block_records @@ -93,7 +93,7 @@ def try_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]: async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]: return self._block_records[header_hash] - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: ret = [] for h in header_hashes: ret.append(self._block_records[h].prev_hash) @@ -107,18 +107,18 @@ def add_block_record(self, block: BlockRecord) -> None: async def get_header_blocks_in_range( self, start: int, stop: int, tx_filter: bool = True - ) -> Dict[bytes32, HeaderBlock]: + ) -> dict[bytes32, HeaderBlock]: return self._headers async def persist_sub_epoch_challenge_segments( - self, sub_epoch_summary_hash: bytes32, segments: List[SubEpochChallengeSegment] + self, sub_epoch_summary_hash: bytes32, segments: list[SubEpochChallengeSegment] ) -> None: self._sub_epoch_segments[sub_epoch_summary_hash] = SubEpochSegments(segments) async def get_sub_epoch_challenge_segments( self, sub_epoch_summary_hash: bytes32, - ) -> Optional[List[SubEpochChallengeSegment]]: + ) -> Optional[list[SubEpochChallengeSegment]]: segments = self._sub_epoch_segments.get(sub_epoch_summary_hash) if segments is None: return None @@ -127,6 +127,6 @@ async def get_sub_epoch_challenge_segments( def seen_compact_proofs(self, vdf_info: VDFInfo, height: uint32) -> bool: return False - async def lookup_block_generators(self, header_hash: bytes32, generator_refs: Set[uint32]) -> Dict[uint32, bytes]: + async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: # not implemented assert False # pragma: no cover diff --git a/chia/_tests/util/build_network_protocol_files.py b/chia/_tests/util/build_network_protocol_files.py index f83f995cd731..1b5c324a344f 100644 --- a/chia/_tests/util/build_network_protocol_files.py +++ b/chia/_tests/util/build_network_protocol_files.py @@ -188,14 +188,13 @@ def build_protocol_test() -> str: from __future__ import annotations from pathlib import Path -from typing import Tuple from chia._tests.util.build_network_protocol_files import get_network_protocol_filename from chia._tests.util.network_protocol_data import * # noqa: F403 from chia._tests.util.protocol_messages_json import * # noqa: F403 -def parse_blob(input_bytes: bytes) -> Tuple[bytes, bytes]: +def parse_blob(input_bytes: bytes) -> tuple[bytes, bytes]: size_bytes = input_bytes[:4] input_bytes = input_bytes[4:] size = int.from_bytes(size_bytes, "big") @@ -236,14 +235,14 @@ def get_protocol_json() -> str: result = """# this file is generated by build_network_protocol_files.py from __future__ import annotations -from typing import Any, Dict +from typing import Any """ counter = 0 def visitor(obj: Any, name: str) -> None: nonlocal result nonlocal counter - result += f"\n{name}_json: Dict[str, Any] = {obj.to_json_dict()}\n" + result += f"\n{name}_json: dict[str, Any] = {obj.to_json_dict()}\n" counter += 1 visit_all_messages(visitor) diff --git a/chia/_tests/util/db_connection.py b/chia/_tests/util/db_connection.py index f6cd0f9c47cd..e15cfa6ed510 100644 --- a/chia/_tests/util/db_connection.py +++ b/chia/_tests/util/db_connection.py @@ -1,9 +1,10 @@ from __future__ import annotations import tempfile +from collections.abc import AsyncIterator from contextlib import 
asynccontextmanager from pathlib import Path -from typing import AsyncIterator, Optional, Type +from typing import Optional import aiosqlite @@ -14,7 +15,7 @@ async def DBConnection( db_version: int, foreign_keys: Optional[bool] = None, - row_factory: Optional[Type[aiosqlite.Row]] = None, + row_factory: Optional[type[aiosqlite.Row]] = None, ) -> AsyncIterator[DBWrapper2]: db_uri = generate_in_memory_db_uri() async with DBWrapper2.managed( diff --git a/chia/_tests/util/full_sync.py b/chia/_tests/util/full_sync.py index faad229a8529..e6d53a1ff236 100644 --- a/chia/_tests/util/full_sync.py +++ b/chia/_tests/util/full_sync.py @@ -5,9 +5,10 @@ import shutil import tempfile import time +from collections.abc import Iterator from contextlib import contextmanager from pathlib import Path -from typing import Callable, Iterator, List, Optional, cast +from typing import Callable, Optional, cast import aiosqlite import zstd @@ -58,13 +59,13 @@ def enable_profiler(profile: bool, counter: int) -> Iterator[None]: class FakeServer: async def send_to_all( - self, messages: List[Message], node_type: NodeType, exclude: Optional[bytes32] = None + self, messages: list[Message], node_type: NodeType, exclude: Optional[bytes32] = None ) -> None: pass async def send_to_all_if( self, - messages: List[Message], + messages: list[Message], node_type: NodeType, predicate: Callable[[WSChiaConnection], bool], exclude: Optional[bytes32] = None, @@ -79,7 +80,7 @@ async def get_peer_info(self) -> Optional[PeerInfo]: def get_connections( self, node_type: Optional[NodeType] = None, *, outbound: Optional[bool] = False - ) -> List[WSChiaConnection]: + ) -> list[WSChiaConnection]: return [] def is_duplicate_or_self_connection(self, target_node: PeerInfo) -> bool: diff --git a/chia/_tests/util/gen_ssl_certs.py b/chia/_tests/util/gen_ssl_certs.py index 7c3b8576a318..b15ccf97eb9d 100644 --- a/chia/_tests/util/gen_ssl_certs.py +++ b/chia/_tests/util/gen_ssl_certs.py @@ -43,7 +43,7 @@ def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: private_ca_key: Optional[bytes] = None capture_cert_and_key = True - print("from typing import Dict, Tuple") + print("from typing import Tuple") print() make_ca_cert(Path("SSL_TEST_PRIVATE_CA_CRT"), Path("SSL_TEST_PRIVATE_CA_KEY")) diff --git a/chia/_tests/util/generator_tools_testing.py b/chia/_tests/util/generator_tools_testing.py index 19825b4004c1..77d9b3bf64c1 100644 --- a/chia/_tests/util/generator_tools_testing.py +++ b/chia/_tests/util/generator_tools_testing.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - from chia.consensus.constants import ConsensusConstants from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions @@ -20,9 +18,9 @@ def run_and_get_removals_and_additions( height: uint32, constants: ConsensusConstants = DEFAULT_CONSTANTS, mempool_mode: bool = False, -) -> Tuple[List[bytes32], List[Coin]]: - removals: List[bytes32] = [] - additions: List[Coin] = [] +) -> tuple[list[bytes32], list[Coin]]: + removals: list[bytes32] = [] + additions: list[Coin] = [] assert len(block.transactions_generator_ref_list) == 0 if not block.is_transaction_block(): diff --git a/chia/_tests/util/key_tool.py b/chia/_tests/util/key_tool.py index 7efe497b0eb0..ef8b346304da 100644 --- a/chia/_tests/util/key_tool.py +++ b/chia/_tests/util/key_tool.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import 
Dict, List from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -13,9 +12,9 @@ @dataclass class KeyTool: - dict: Dict[G1Element, int] = field(default_factory=dict) + dict: dict[G1Element, int] = field(default_factory=dict) - def add_secret_exponents(self, secret_exponents: List[int]) -> None: + def add_secret_exponents(self, secret_exponents: list[int]) -> None: for _ in secret_exponents: self.dict[int_to_public_key(_)] = _ % GROUP_ORDER diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index 3b2bd95fb478..b257fc2a576b 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -12,6 +12,7 @@ import ssl import subprocess import sys +from collections.abc import Awaitable, Collection, Iterable, Iterator from concurrent.futures import Future from dataclasses import dataclass, field from enum import Enum @@ -21,27 +22,7 @@ from textwrap import dedent from time import thread_time from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Awaitable, - Callable, - ClassVar, - Collection, - Dict, - Iterable, - Iterator, - List, - Optional, - Protocol, - TextIO, - Tuple, - Type, - TypeVar, - Union, - cast, - final, -) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, Protocol, TextIO, TypeVar, Union, cast, final import aiohttp import pytest @@ -187,7 +168,7 @@ def measure_overhead( ], cycles: int = 10, ) -> float: - times: List[float] = [] + times: list[float] = [] for _ in range(cycles): with manager_maker() as results: @@ -258,10 +239,10 @@ class BenchmarkData: label: str - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () @classmethod - def unmarshal(cls, marshalled: Dict[str, Any]) -> BenchmarkData: + def unmarshal(cls, marshalled: dict[str, Any]) -> BenchmarkData: return cls( duration=marshalled["duration"], path=pathlib.Path(marshalled["path"]), @@ -270,7 +251,7 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> BenchmarkData: label=marshalled["label"], ) - def marshal(self) -> Dict[str, Any]: + def marshal(self) -> dict[str, Any]: return { "duration": self.duration, "path": self.path.as_posix(), @@ -337,7 +318,7 @@ def __enter__(self) -> Future[AssertRuntimeResults]: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: @@ -405,7 +386,7 @@ def assert_rpc_error(error: str) -> Iterator[None]: @contextlib.contextmanager -def closing_chia_root_popen(chia_root: ChiaRoot, args: List[str]) -> Iterator[subprocess.Popen[Any]]: +def closing_chia_root_popen(chia_root: ChiaRoot, args: list[str]) -> Iterator[subprocess.Popen[Any]]: environment = {**os.environ, "CHIA_ROOT": os.fspath(chia_root.path)} with subprocess.Popen(args=args, env=environment) as process: @@ -476,7 +457,7 @@ def get(self, parent_coin_id: Optional[bytes32] = None, include_hint: bool = Tru return HintedCoin(Coin(parent_coin_id, self._get_hash(), self._get_amount()), hint) -def coin_creation_args(hinted_coin: HintedCoin) -> List[Any]: +def coin_creation_args(hinted_coin: HintedCoin) -> list[Any]: if hinted_coin.hint is not None: memos = [hinted_coin.hint] else: @@ -515,7 +496,7 @@ async def wallet_height_at_least(wallet_node: WalletNode, h: uint32) -> bool: @dataclass class RecordingWebServer: web_server: WebServer - requests: List[web.Request] = field(default_factory=list) + requests: list[web.Request] = field(default_factory=list) @classmethod async def create( @@ -541,7 
+522,7 @@ async def create( await web_server.start() return self - def get_routes(self) -> Dict[str, Callable[[web.Request], Awaitable[web.Response]]]: + def get_routes(self) -> dict[str, Callable[[web.Request], Awaitable[web.Response]]]: return {"/{path:.*}": self.handler} async def handler(self, request: web.Request) -> web.Response: @@ -564,12 +545,12 @@ async def await_closed(self) -> None: @dataclasses.dataclass(frozen=True) class TestId: platform: str - test_path: Tuple[str, ...] - ids: Tuple[str, ...] + test_path: tuple[str, ...] + ids: tuple[str, ...] @classmethod def create(cls, node: Node, platform: str = sys.platform) -> TestId: - test_path: List[str] = [] + test_path: list[str] = [] temp_node = node while True: name: str @@ -589,7 +570,7 @@ def create(cls, node: Node, platform: str = sys.platform) -> TestId: # TODO: can we avoid parsing the id's etc from the node name? test_name, delimiter, rest = node.name.partition("[") - ids: Tuple[str, ...] + ids: tuple[str, ...] if delimiter == "": ids = () else: @@ -602,14 +583,14 @@ def create(cls, node: Node, platform: str = sys.platform) -> TestId: ) @classmethod - def unmarshal(cls, marshalled: Dict[str, Any]) -> TestId: + def unmarshal(cls, marshalled: dict[str, Any]) -> TestId: return cls( platform=marshalled["platform"], test_path=tuple(marshalled["test_path"]), ids=tuple(marshalled["ids"]), ) - def marshal(self) -> Dict[str, Any]: + def marshal(self) -> dict[str, Any]: return { "platform": self.platform, "test_path": self.test_path, @@ -630,12 +611,12 @@ class DataTypeProtocol(Protocol): duration: float limit: float - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () @classmethod - def unmarshal(cls: Type[T], marshalled: Dict[str, Any]) -> T: ... + def unmarshal(cls: type[T], marshalled: dict[str, Any]) -> T: ... - def marshal(self) -> Dict[str, Any]: ... + def marshal(self) -> dict[str, Any]: ... 
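# [Editor's note, illustration only -- not part of the patch] `type[T]` in the
# protocol above replaces `typing.Type[T]` with identical meaning: "the class
# object itself, or a subclass of it". A hypothetical caller, to show how the
# paired classmethod/instance-method protocol is meant to round-trip:
#
#     def roundtrip(cls: type[DataTypeProtocol], raw: dict[str, Any]) -> dict[str, Any]:
#         return cls.unmarshal(raw).marshal()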
T_ComparableEnum = TypeVar("T_ComparableEnum", bound="ComparableEnum") @@ -679,11 +660,11 @@ def __ge__(self: T_ComparableEnum, other: T_ComparableEnum) -> object: return self.value.__ge__(other.value) -def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) -> Tuple[str, int]: +def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) -> tuple[str, int]: caller = getframeinfo(stack()[distance + 1][0]) caller_path = Path(caller.filename) - options: List[str] = [caller_path.as_posix()] + options: list[str] = [caller_path.as_posix()] for path in relative_to: try: options.append(caller_path.relative_to(path).as_posix()) @@ -694,7 +675,7 @@ def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) -> async def add_blocks_in_batches( - blocks: List[FullBlock], + blocks: list[FullBlock], full_node: FullNode, header_hash: Optional[bytes32] = None, ) -> None: diff --git a/chia/_tests/util/protocol_messages_json.py b/chia/_tests/util/protocol_messages_json.py index b4ceb8b1a229..6aa5800fc207 100644 --- a/chia/_tests/util/protocol_messages_json.py +++ b/chia/_tests/util/protocol_messages_json.py @@ -1,9 +1,9 @@ # this file is generated by build_network_protocol_files.py from __future__ import annotations -from typing import Any, Dict +from typing import Any -new_signage_point_json: Dict[str, Any] = { +new_signage_point_json: dict[str, Any] = { "challenge_hash": "0x34b2a753b0dc864e7218f8facf23ca0e2b636351df5289b76f5845d9a78b7026", "challenge_chain_sp": "0x9dc8b9d685c79acdf8780d994416dfcfb118e0adc99769ecfa94e1f40aa5bbe5", "reward_chain_sp": "0xb2828a2c7f6a2555c80c3ca9d10792a7da6ee80f686122ecd2c748dc0569a867", @@ -14,7 +14,7 @@ "sp_source_data": None, } -declare_proof_of_space_json: Dict[str, Any] = { +declare_proof_of_space_json: dict[str, Any] = { "challenge_hash": "0x3f44d177faa11cea40477f233a8b365cce77215a84f48f65a37b2ac35c7e3ccc", "challenge_chain_sp": "0x931c83fd8ef121177257301e11f41642618ddac65509939e252243e41bacbf78", "signage_point_index": 31, @@ -38,7 +38,7 @@ "include_signature_source_data": False, } -request_signed_values_json: Dict[str, Any] = { +request_signed_values_json: dict[str, Any] = { "quality_string": "0x60649de258d2221ca6a178476861b13f8c394a992eaeae1f1159c32bbf703b45", "foliage_block_data_hash": "0x9da23e943246bb99ebeb5e773e35a445bbbfdbd45dd9b9df169eeca80880a53b", "foliage_transaction_block_hash": "0x5d76a4bcb3524d862e92317410583daf50828927885444c6d62ca8843635c46f", @@ -47,7 +47,7 @@ "rc_block_unfinished": None, } -farming_info_json: Dict[str, Any] = { +farming_info_json: dict[str, Any] = { "challenge_hash": "0x345cefad6a04d3ea4fec4b31e56000de622de9fe861afa53424138dd45307fc2", "sp_hash": "0x1105c288abb976e95804796aea5bb6f66a6b500c0f538d4e71f0d701cad9ff11", "timestamp": 16359391077414942762, @@ -57,13 +57,13 @@ "lookup_time": 3942498, } -signed_values_json: Dict[str, Any] = { +signed_values_json: dict[str, Any] = { "quality_string": "0x915de5949724e1fc92d334e589c26ddbcd67415cbbdbbfc5e6de93b3b33bb267", "foliage_block_data_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", "foliage_transaction_block_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", } -new_peak_json: Dict[str, Any] = { +new_peak_json: 
dict[str, Any] = { "header_hash": "0x8a346e8dc02e9b44c0571caa74fd99f163d4c5d7deae9f8ddb00528721493f7a", "height": 2653549198, "weight": 196318552117141200341240034145143439804, @@ -71,17 +71,17 @@ "unfinished_reward_block_hash": "0xdd421c55d4edaeeb3ad60e80d73c2005a1b275c381c7e418915200d7467711b5", } -new_transaction_json: Dict[str, Any] = { +new_transaction_json: dict[str, Any] = { "transaction_id": "0xe4fe833328d4e82f9c57bc1fc2082c9b63da23e46927522cb5a073f9f0979b6a", "cost": 13950654730705425115, "fees": 10674036971945712700, } -request_transaction_json: Dict[str, Any] = { +request_transaction_json: dict[str, Any] = { "transaction_id": "0x3dc310a07be53bfd701e4a0d77ce39836eeab4717fe25b1ae4c3f16aad0e5d83" } -respond_transaction_json: Dict[str, Any] = { +respond_transaction_json: dict[str, Any] = { "transaction": { "coin_spends": [ { @@ -98,12 +98,12 @@ } } -request_proof_of_weight_json: Dict[str, Any] = { +request_proof_of_weight_json: dict[str, Any] = { "total_number_of_blocks": 1109907246, "tip": "0x1fa3bfc747762c6edbe9937630e50b6982c3cf4fd67931f2ffcececb8c509839", } -respond_proof_of_weight_json: Dict[str, Any] = { +respond_proof_of_weight_json: dict[str, Any] = { "wp": { "sub_epochs": [ { @@ -380,17 +380,17 @@ "tip": "0xbf71d6f1ecae308aacf87db77aeba5a06f5d1099bfc7005529885e1f2dad857f", } -request_block_json: Dict[str, Any] = {"height": 678860074, "include_transaction_block": False} +request_block_json: dict[str, Any] = {"height": 678860074, "include_transaction_block": False} -reject_block_json: Dict[str, Any] = {"height": 966946253} +reject_block_json: dict[str, Any] = {"height": 966946253} -request_blocks_json: Dict[str, Any] = { +request_blocks_json: dict[str, Any] = { "start_height": 2578479570, "end_height": 3884442719, "include_transaction_block": False, } -respond_blocks_json: Dict[str, Any] = { +respond_blocks_json: dict[str, Any] = { "start_height": 1000, "end_height": 4201431299, "blocks": [ @@ -749,9 +749,9 @@ ], } -reject_blocks_json: Dict[str, Any] = {"start_height": 1160742782, "end_height": 1856800720} +reject_blocks_json: dict[str, Any] = {"start_height": 1160742782, "end_height": 1856800720} -respond_block_json: Dict[str, Any] = { +respond_block_json: dict[str, Any] = { "block": { "finished_sub_slots": [ { @@ -930,15 +930,15 @@ } } -new_unfinished_block_json: Dict[str, Any] = { +new_unfinished_block_json: dict[str, Any] = { "unfinished_reward_hash": "0x229646fb33551966039d9324c0d10166c554d20e9a11e3f30942ec0bb346377e" } -request_unfinished_block_json: Dict[str, Any] = { +request_unfinished_block_json: dict[str, Any] = { "unfinished_reward_hash": "0x8b5e5a59f33bb89e1bfd5aca79409352864e70aa7765c331d641875f83d59d1d" } -respond_unfinished_block_json: Dict[str, Any] = { +respond_unfinished_block_json: dict[str, Any] = { "unfinished_block": { "finished_sub_slots": [ { @@ -1078,20 +1078,20 @@ } } -new_signage_point_or_end_of_subslot_json: Dict[str, Any] = { +new_signage_point_or_end_of_subslot_json: dict[str, Any] = { "prev_challenge_hash": "0xf945510ccea927f832635e56bc20315c92943e108d2b458ac91a290a82e02997", "challenge_hash": "0x27a16b348971e5dfb258e7a01f0b300acbecf8339476afd144e8520f1981833b", "index_from_challenge": 102, "last_rc_infusion": "0xa619471c0ba0b8b8b92b7b2cb1241c2fbb2324c4f1a20a01eb7dcc0027393a56", } -request_signage_point_or_end_of_subslot_json: Dict[str, Any] = { +request_signage_point_or_end_of_subslot_json: dict[str, Any] = { "challenge_hash": "0xedd45b516bf1dc3754c30a99e289639e05f967dc1b590df8a377652bee4f463c", "index_from_challenge": 217, "last_rc_infusion": 
"0xb574062b42a5b3d76ea141d3b89a4a1096f7797bafe625770047380448622420", } -respond_signage_point_json: Dict[str, Any] = { +respond_signage_point_json: dict[str, Any] = { "index_from_challenge": 111, "challenge_chain_vdf": { "challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972", @@ -1119,7 +1119,7 @@ }, } -respond_end_of_subslot_json: Dict[str, Any] = { +respond_end_of_subslot_json: dict[str, Any] = { "end_of_slot_bundle": { "challenge_chain": { "challenge_chain_end_of_slot_vdf": { @@ -1175,11 +1175,11 @@ } } -request_mempool_transaction_json: Dict[str, Any] = { +request_mempool_transaction_json: dict[str, Any] = { "filter": "0x0000000000000000000000000000000000000000000000000000000000000000" } -new_compact_vdf_json: Dict[str, Any] = { +new_compact_vdf_json: dict[str, Any] = { "height": 1333973478, "header_hash": "0xe2188779d4a8e8fdf9cbe3103878b4c3f5f25a999fa8d04551c4ae01046c634e", "field_vdf": 169, @@ -1192,7 +1192,7 @@ }, } -request_compact_vdf_json: Dict[str, Any] = { +request_compact_vdf_json: dict[str, Any] = { "height": 3529778757, "header_hash": "0x1c02dfbf437c464cfd3f71d2da283c22bd04b2061e3c6b4bfd8b859092957d96", "field_vdf": 207, @@ -1205,7 +1205,7 @@ }, } -respond_compact_vdf_json: Dict[str, Any] = { +respond_compact_vdf_json: dict[str, Any] = { "height": 2759248594, "header_hash": "0x51f2e23ac76179d69bc9232420f47e2a332b8c2495c24ceef7f730feb53c9117", "field_vdf": 167, @@ -1223,33 +1223,33 @@ }, } -request_peers_json: Dict[str, Any] = {} +request_peers_json: dict[str, Any] = {} -respond_peers_json: Dict[str, Any] = {"peer_list": [{"host": "127.0.0.1", "port": 8444, "timestamp": 10796}]} +respond_peers_json: dict[str, Any] = {"peer_list": [{"host": "127.0.0.1", "port": 8444, "timestamp": 10796}]} -new_unfinished_block2_json: Dict[str, Any] = { +new_unfinished_block2_json: dict[str, Any] = { "unfinished_reward_hash": "0x229646fb33551966039d9324c0d10166c554d20e9a11e3f30942ec0bb346377e", "foliage_hash": "0x166c554d20e9a11e3f30942ec0bb346377e229646fb33551966039d9324c0d10", } -request_unfinished_block2_json: Dict[str, Any] = { +request_unfinished_block2_json: dict[str, Any] = { "unfinished_reward_hash": "0x8b5e5a59f33bb89e1bfd5aca79409352864e70aa7765c331d641875f83d59d1d", "foliage_hash": "0xa79409352864e70aa7765c331d641875f83d59d1d8b5e5a59f33bb89e1bfd5ac", } -request_puzzle_solution_json: Dict[str, Any] = { +request_puzzle_solution_json: dict[str, Any] = { "coin_name": "0x6edddb46bd154f50566b49c95812e0f1131a0a7162630349fc8d1d696e463e47", "height": 3905474497, } -puzzle_solution_response_json: Dict[str, Any] = { +puzzle_solution_response_json: dict[str, Any] = { "coin_name": "0x45c4451fdeef92aa0706def2448adfaed8e4a1c0b08a6d303c57de661509c442", "height": 3776325015, "puzzle": "0xff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080", "solution": "0xff01ffff33ffa0f8912302fb33b8188046662785704afc3dd945074e4b45499a7173946e044695ff8203e880ffff33ffa03eaa52e850322dbc281c6b922e9d8819c7b4120ee054c4aa79db50be516a2bcaff8207d08080", } -respond_puzzle_solution_json: Dict[str, Any] = { +respond_puzzle_solution_json: dict[str, Any] = { "response": { "coin_name": "0x45c4451fdeef92aa0706def2448adfaed8e4a1c0b08a6d303c57de661509c442", "height": 3776325015, @@ -1258,12 +1258,12 @@ } } -reject_puzzle_solution_json: Dict[str, Any] = { +reject_puzzle_solution_json: dict[str, Any] = { "coin_name": "0x2f16254e8e7a0b3fbe7bc709d29c5e7d2daa23ce1a2964e3f77b9413055029dd", 
"height": 2039721496, } -send_transaction_json: Dict[str, Any] = { +send_transaction_json: dict[str, Any] = { "transaction": { "coin_spends": [ { @@ -1280,28 +1280,28 @@ } } -transaction_ack_json: Dict[str, Any] = { +transaction_ack_json: dict[str, Any] = { "txid": "0xfc30d2df70f4ca0a138d5135d352611ddf268ea46c59cde48c29c43d9472532c", "status": 30, "error": "None", } -new_peak_wallet_json: Dict[str, Any] = { +new_peak_wallet_json: dict[str, Any] = { "header_hash": "0xee50e45652cb6a60e3ab0031aa425a6019648fe5344ae860e6fc14af1aa3c2fa", "height": 1093428752, "weight": 207496292293729126634170184354599452208, "fork_point_with_previous_peak": 133681371, } -request_block_header_json: Dict[str, Any] = {"height": 3562957314} +request_block_header_json: dict[str, Any] = {"height": 3562957314} -request_block_headers_json: Dict[str, Any] = { +request_block_headers_json: dict[str, Any] = { "start_height": 1234970524, "end_height": 234653234, "return_filter": False, } -respond_header_block_json: Dict[str, Any] = { +respond_header_block_json: dict[str, Any] = { "header_block": { "finished_sub_slots": [ { @@ -1479,7 +1479,7 @@ } } -respond_block_headers_json: Dict[str, Any] = { +respond_block_headers_json: dict[str, Any] = { "start_height": 923662371, "end_height": 992357623, "header_blocks": [ @@ -1661,15 +1661,15 @@ ], } -reject_header_request_json: Dict[str, Any] = {"height": 17867635} +reject_header_request_json: dict[str, Any] = {"height": 17867635} -request_removals_json: Dict[str, Any] = { +request_removals_json: dict[str, Any] = { "height": 3500751918, "header_hash": "0xb44bc0e0fce20331a57081107dfd30ef39fc436e6e6ce4f6f0ab8db4f981d114", "coin_names": ["0xab62cfb2abaf9e1a475b707c3d3de35d6ef4a298b31137802fd9ea47d48ff0d5"], } -respond_removals_json: Dict[str, Any] = { +respond_removals_json: dict[str, Any] = { "height": 461268095, "header_hash": "0xe2db23a6484b05d9ae1033efe8dcfcf5894fc600a6b93b03782fab8dd1cba8a4", "coins": [ @@ -1685,18 +1685,18 @@ "proofs": [["0x652c312e1dd9f32bf074e17ae8b658bf47711bd1a5e6c937adfb0c80b51fa49d", "0x61616161616161616161"]], } -reject_removals_request_json: Dict[str, Any] = { +reject_removals_request_json: dict[str, Any] = { "height": 3247661701, "header_hash": "0xd5eee2d2ad56663c1c1d1cbde69329862dcf29010683aa7a0da91712d6876caf", } -request_additions_json: Dict[str, Any] = { +request_additions_json: dict[str, Any] = { "height": 2566479739, "header_hash": "0x17262e35437ddc95d43431d20657c096cff95f7ba93a39367f56f1f9df0f0277", "puzzle_hashes": ["0x6fc7b72bc37f462dc820d4b39c9e69e9e65b590ee1a6b0a06b5105d048c278d4"], } -respond_additions_json: Dict[str, Any] = { +respond_additions_json: dict[str, Any] = { "height": 1992350400, "header_hash": "0x449ba349ce403c1acfcd46108758e7ada3a455e7a82dbee90860ec73adb090c9", "coins": [ @@ -1725,16 +1725,16 @@ ], } -reject_additions_json: Dict[str, Any] = { +reject_additions_json: dict[str, Any] = { "height": 3457211200, "header_hash": "0x4eb659e6dd727bc22191795692aae576922e56ae309871c352eede0c9dd8bb12", } -request_header_blocks_json: Dict[str, Any] = {"start_height": 2858301848, "end_height": 720941539} +request_header_blocks_json: dict[str, Any] = {"start_height": 2858301848, "end_height": 720941539} -reject_header_blocks_json: Dict[str, Any] = {"start_height": 876520264, "end_height": 2908717391} +reject_header_blocks_json: dict[str, Any] = {"start_height": 876520264, "end_height": 2908717391} -respond_header_blocks_json: Dict[str, Any] = { +respond_header_blocks_json: dict[str, Any] = { "start_height": 4130100992, "end_height": 17664086, 
"header_blocks": [ @@ -1916,7 +1916,7 @@ ], } -coin_state_json: Dict[str, Any] = { +coin_state_json: dict[str, Any] = { "coin": { "parent_coin_info": "0xd56f435d3382cb9aa5f50f51816e4c54487c66402339901450f3c810f1d77098", "puzzle_hash": "0x9944f63fcc251719b2f04c47ab976a167f96510736dc6fdfa8e037d740f4b5f3", @@ -1926,14 +1926,14 @@ "created_height": 3361305811, } -register_for_ph_updates_json: Dict[str, Any] = { +register_for_ph_updates_json: dict[str, Any] = { "puzzle_hashes": ["0xdf24b7dc1d5ffa12f112e198cd26385b5ab302b5c2e5f9d589e5cd3f7b900510"], "min_height": 874269130, } -reject_block_headers_json: Dict[str, Any] = {"start_height": 543373229, "end_height": 2347869036} +reject_block_headers_json: dict[str, Any] = {"start_height": 543373229, "end_height": 2347869036} -respond_to_ph_updates_json: Dict[str, Any] = { +respond_to_ph_updates_json: dict[str, Any] = { "puzzle_hashes": ["0x1be3bdc54b84901554e4e843966cfa3be3380054c968bebc41cc6be4aa65322f"], "min_height": 3664709982, "coin_states": [ @@ -1949,12 +1949,12 @@ ], } -register_for_coin_updates_json: Dict[str, Any] = { +register_for_coin_updates_json: dict[str, Any] = { "coin_ids": ["0x1d7748531ece395e8bb8468b112d4ccdd1cea027359abd03c0b015edf666eec8"], "min_height": 3566185528, } -respond_to_coin_updates_json: Dict[str, Any] = { +respond_to_coin_updates_json: dict[str, Any] = { "coin_ids": ["0xdb8bad6bd9de34d4884380176135f31a655dca18e9a5fadfb567145b81b6a9e0"], "min_height": 3818814774, "coin_states": [ @@ -1970,7 +1970,7 @@ ], } -coin_state_update_json: Dict[str, Any] = { +coin_state_update_json: dict[str, Any] = { "height": 855344561, "fork_height": 1659753011, "peak_hash": "0x8512cc80a2976c81186e8963bc7af9d6d5732ccae5227fffee823f0bf3081e76", @@ -1987,11 +1987,11 @@ ], } -request_children_json: Dict[str, Any] = { +request_children_json: dict[str, Any] = { "coin_name": "0x15beeed2e6dd0cf1b81a3f68a49845c020912218e4c1f002a1b3f43333495478" } -respond_children_json: Dict[str, Any] = { +respond_children_json: dict[str, Any] = { "coin_states": [ { "coin": { @@ -2005,21 +2005,21 @@ ] } -request_ses_info_json: Dict[str, Any] = {"start_height": 2704205398, "end_height": 2050258406} +request_ses_info_json: dict[str, Any] = {"start_height": 2704205398, "end_height": 2050258406} -respond_ses_info_json: Dict[str, Any] = { +respond_ses_info_json: dict[str, Any] = { "reward_chain_hash": ["0xb61cb91773995e99cb8259609c0985f915a5734a1706aeab9342a2d1c5abf71b"], "heights": [[1, 2, 3], [4, 606340525]], } -coin_state_filters_json: Dict[str, Any] = { +coin_state_filters_json: dict[str, Any] = { "include_spent": True, "include_unspent": True, "include_hinted": True, "min_amount": 0, } -request_remove_puzzle_subscriptions_json: Dict[str, Any] = { +request_remove_puzzle_subscriptions_json: dict[str, Any] = { "puzzle_hashes": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2027,7 +2027,7 @@ ] } -respond_remove_puzzle_subscriptions_json: Dict[str, Any] = { +respond_remove_puzzle_subscriptions_json: dict[str, Any] = { "puzzle_hashes": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2035,7 +2035,7 @@ ] } -request_remove_coin_subscriptions_json: Dict[str, Any] = { +request_remove_coin_subscriptions_json: dict[str, Any] = { "coin_ids": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ 
-2043,7 +2043,7 @@ ] } -respond_remove_coin_subscriptions_json: Dict[str, Any] = { +respond_remove_coin_subscriptions_json: dict[str, Any] = { "coin_ids": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2051,7 +2051,7 @@ ] } -request_puzzle_state_json: Dict[str, Any] = { +request_puzzle_state_json: dict[str, Any] = { "puzzle_hashes": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2063,9 +2063,9 @@ "subscribe_when_finished": True, } -reject_puzzle_state_json: Dict[str, Any] = {"reason": 0} +reject_puzzle_state_json: dict[str, Any] = {"reason": 0} -respond_puzzle_state_json: Dict[str, Any] = { +respond_puzzle_state_json: dict[str, Any] = { "puzzle_hashes": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2087,7 +2087,7 @@ ], } -request_coin_state_json: Dict[str, Any] = { +request_coin_state_json: dict[str, Any] = { "coin_ids": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2098,7 +2098,7 @@ "subscribe": False, } -respond_coin_state_json: Dict[str, Any] = { +respond_coin_state_json: dict[str, Any] = { "coin_ids": [ "0x59710628755b6d7f7d0b5d84d5c980e7a1c52e55f5a43b531312402bd9045da7", "0xd4a68c9dc42d625092c3e71a657cce469ae4180d1b0632256d2da8ffc0a9beca", @@ -2117,11 +2117,11 @@ ], } -reject_coin_state_json: Dict[str, Any] = {"reason": 1} +reject_coin_state_json: dict[str, Any] = {"reason": 1} -request_cost_info_json: Dict[str, Any] = {} +request_cost_info_json: dict[str, Any] = {} -respond_cost_info_json: Dict[str, Any] = { +respond_cost_info_json: dict[str, Any] = { "max_transaction_cost": 100000, "max_block_cost": 1000000, "max_mempool_cost": 10000000, @@ -2130,13 +2130,13 @@ "bump_fee_per_cost": 10, } -pool_difficulty_json: Dict[str, Any] = { +pool_difficulty_json: dict[str, Any] = { "difficulty": 14819251421858580996, "sub_slot_iters": 12852879676624401630, "pool_contract_puzzle_hash": "0xc9423123ea65e6923e973b95531b4874570dae942cb757a2daec4a6971753886", } -harvester_handhsake_json: Dict[str, Any] = { +harvester_handhsake_json: dict[str, Any] = { "farmer_public_keys": [ "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c" ], @@ -2145,7 +2145,7 @@ ], } -new_signage_point_harvester_json: Dict[str, Any] = { +new_signage_point_harvester_json: dict[str, Any] = { "challenge_hash": "0xe342c21b4aeaa52349d42492be934692db58494ca9bce4a8697d06fdf8e583bb", "difficulty": 15615706268399948682, "sub_slot_iters": 10520767421667792980, @@ -2161,7 +2161,7 @@ "filter_prefix_bits": 9, } -new_proof_of_space_json: Dict[str, Any] = { +new_proof_of_space_json: dict[str, Any] = { "challenge_hash": "0x1b64ec6bf3fe33bb80eca5b64ff1c88be07771eaed1e98a7199510522087e56e", "sp_hash": "0xad1f8a74376ce8c5c93b7fbb355c2fb6d689ae4f4a7134166593d95265a3da30", "plot_identifier": "plot_1", @@ -2179,7 +2179,7 @@ "fee_info": {"applied_fee_threshold": 1337}, } -request_signatures_json: Dict[str, Any] = { +request_signatures_json: dict[str, Any] = { "plot_identifier": "plot_1", "challenge_hash": "0xb5fa873020fa8b959d89bc2ffc5797501bf870ac8b30437cd6b4fcdea0812789", "sp_hash": "0xbccb7744192771f3a7abca2bce6ea03ed53f1f0d991c13bd2711ce32a2fb3777", @@ -2245,7 +2245,7 @@ }, } -respond_signatures_json: 
Dict[str, Any] = { +respond_signatures_json: dict[str, Any] = { "plot_identifier": "plot_1", "challenge_hash": "0x59468dce63b5b08490ec4eec4c461fc84b69b6f80a64f4c76b0d55780f7e7e7a", "sp_hash": "0x270b5fc00545db714077aba3b60245d769f492563f108a73b2b8502503d12b9e", @@ -2261,7 +2261,7 @@ "farmer_reward_address_override": "0xcb3ddb903f52818724bb3b32fd5310f5e8623697561930dca73cb9da5dd9349c", } -plot_json: Dict[str, Any] = { +plot_json: dict[str, Any] = { "filename": "plot_1", "size": 124, "plot_id": "0xb2eb7e5c5239e8610a9dd0e137e185966ebb430faf31ae4a0e55d86251065b98", @@ -2273,9 +2273,9 @@ "compression_level": 0, } -request_plots_json: Dict[str, Any] = {} +request_plots_json: dict[str, Any] = {} -respond_plots_json: Dict[str, Any] = { +respond_plots_json: dict[str, Any] = { "plots": [ { "filename": "plot_1", @@ -2293,20 +2293,20 @@ "no_key_filenames": ["str"], } -request_peers_introducer_json: Dict[str, Any] = {} +request_peers_introducer_json: dict[str, Any] = {} -respond_peers_introducer_json: Dict[str, Any] = { +respond_peers_introducer_json: dict[str, Any] = { "peer_list": [{"host": "127.0.0.1", "port": 49878, "timestamp": 15079028934557257795}] } -authentication_payload_json: Dict[str, Any] = { +authentication_payload_json: dict[str, Any] = { "method_name": "method", "launcher_id": "0x0251e3b3a1aacc689091b6b085be7a8d319bd9d1a015faae969cb76d8a45607c", "target_puzzle_hash": "0x9de241b508b5e9e2073b7645291cfaa9458d33935340399a861acf2ee1770440", "authentication_token": 4676522834655707230, } -get_pool_info_response_json: Dict[str, Any] = { +get_pool_info_response_json: dict[str, Any] = { "name": "pool_name", "logo_url": "pool_name", "minimum_difficulty": 7020711482626732214, @@ -2318,7 +2318,7 @@ "authentication_token_timeout": 76, } -post_partial_payload_json: Dict[str, Any] = { +post_partial_payload_json: dict[str, Any] = { "launcher_id": "0xdada61e179e67e5e8bc7aaab16e192facf0f15871f0c479d2a96ac5f85721a1a", "authentication_token": 2491521039628830788, "proof_of_space": { @@ -2334,7 +2334,7 @@ "harvester_id": "0xf98dff6bdcc3926b33cb8ab22e11bd15c13d6a9b6832ac948b3273f5ccd8e7ec", } -post_partial_request_json: Dict[str, Any] = { +post_partial_request_json: dict[str, Any] = { "payload": { "launcher_id": "0xdada61e179e67e5e8bc7aaab16e192facf0f15871f0c479d2a96ac5f85721a1a", "authentication_token": 2491521039628830788, @@ -2353,16 +2353,16 @@ "aggregate_signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", } -post_partial_response_json: Dict[str, Any] = {"new_difficulty": 5956480724816802941} +post_partial_response_json: dict[str, Any] = {"new_difficulty": 5956480724816802941} -get_farmer_response_json: Dict[str, Any] = { +get_farmer_response_json: dict[str, Any] = { "authentication_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c", "payout_instructions": "instructions", "current_difficulty": 8362834206591090467, "current_points": 14310455844127802841, } -post_farmer_payload_json: Dict[str, Any] = { +post_farmer_payload_json: dict[str, Any] = { "launcher_id": "0xd3785b251b4e066f87784d06afc8e6ac8dac5a4922d994902c1bad60b5fa7ad3", "authentication_token": 5820795488800541986, "authentication_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c", @@ -2370,7 +2370,7 @@ "suggested_difficulty": 1996244065095983466, } 
-post_farmer_request_json: Dict[str, Any] = { +post_farmer_request_json: dict[str, Any] = { "payload": { "launcher_id": "0xd3785b251b4e066f87784d06afc8e6ac8dac5a4922d994902c1bad60b5fa7ad3", "authentication_token": 5820795488800541986, @@ -2381,9 +2381,9 @@ "signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", } -post_farmer_response_json: Dict[str, Any] = {"welcome_message": "welcome"} +post_farmer_response_json: dict[str, Any] = {"welcome_message": "welcome"} -put_farmer_payload_json: Dict[str, Any] = { +put_farmer_payload_json: dict[str, Any] = { "launcher_id": "0x78aec4d523b0bea49829a1322d5de92a86a553ce8774690b8c8ad5fc1f7540a8", "authentication_token": 15049374353843709257, "authentication_public_key": "0xa04c6b5ac7dfb935f6feecfdd72348ccf1d4be4fe7e26acf271ea3b7d308da61e0a308f7a62495328a81f5147b66634c", @@ -2391,7 +2391,7 @@ "suggested_difficulty": 201241879360854600, } -put_farmer_request_json: Dict[str, Any] = { +put_farmer_request_json: dict[str, Any] = { "payload": { "launcher_id": "0x78aec4d523b0bea49829a1322d5de92a86a553ce8774690b8c8ad5fc1f7540a8", "authentication_token": 15049374353843709257, @@ -2402,15 +2402,15 @@ "signature": "0xc00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", } -put_farmer_response_json: Dict[str, Any] = { +put_farmer_response_json: dict[str, Any] = { "authentication_public_key": False, "payout_instructions": False, "suggested_difficulty": True, } -error_response_json: Dict[str, Any] = {"error_code": 47018, "error_message": "err"} +error_response_json: dict[str, Any] = {"error_code": 47018, "error_message": "err"} -new_peak_timelord_json: Dict[str, Any] = { +new_peak_timelord_json: dict[str, Any] = { "reward_chain_block": { "weight": 187084448821891925757676377381787790114, "height": 301889038, @@ -2481,7 +2481,7 @@ "passes_ses_height_but_not_yet_included": True, } -new_unfinished_block_timelord_json: Dict[str, Any] = { +new_unfinished_block_timelord_json: dict[str, Any] = { "reward_chain_block": { "total_iters": 147405131564197136044258885592706844266, "signage_point_index": 9, @@ -2540,7 +2540,7 @@ "rc_prev": "0x0f90296b605904a794e4e98852e3b22e0d9bee2fa07abb12df6cecbdb778e1e5", } -new_infusion_point_vdf_json: Dict[str, Any] = { +new_infusion_point_vdf_json: dict[str, Any] = { "unfinished_reward_hash": "0x3d3b977d3a3dab50f0cd72b74b2f08f5018fb5ef826a8773161b7a499dafa60f", "challenge_chain_ip_vdf": { "challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972", @@ -2580,7 +2580,7 @@ }, } -new_signage_point_vdf_json: Dict[str, Any] = { +new_signage_point_vdf_json: dict[str, Any] = { "index_from_challenge": 182, "challenge_chain_sp_vdf": { "challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972", @@ -2608,7 +2608,7 @@ }, } -new_end_of_sub_slot_bundle_json: Dict[str, Any] = { +new_end_of_sub_slot_bundle_json: dict[str, Any] = { "end_of_sub_slot_bundle": { "challenge_chain": { "challenge_chain_end_of_slot_vdf": { @@ -2664,7 +2664,7 @@ } } -request_compact_proof_of_time_json: Dict[str, Any] = { +request_compact_proof_of_time_json: dict[str, Any] = { "new_proof_of_time": { "challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972", "number_of_iterations": 14708638287767651172, @@ -2677,7 +2677,7 @@ 
"field_vdf": 237, } -respond_compact_proof_of_time_json: Dict[str, Any] = { +respond_compact_proof_of_time_json: dict[str, Any] = { "vdf_info": { "challenge": "0x7cbd5905838c1dc2becd00298a5b3a6e42b6a306d574c8897cd721f84d429972", "number_of_iterations": 14708638287767651172, @@ -2695,6 +2695,6 @@ "field_vdf": 224, } -error_without_data_json: Dict[str, Any] = {"code": 1, "message": "Unknown", "data": None} +error_without_data_json: dict[str, Any] = {"code": 1, "message": "Unknown", "data": None} -error_with_data_json: Dict[str, Any] = {"code": 1, "message": "Unknown", "data": "0x65787472612064617461"} +error_with_data_json: dict[str, Any] = {"code": 1, "message": "Unknown", "data": "0x65787472612064617461"} diff --git a/chia/_tests/util/run_block.py b/chia/_tests/util/run_block.py index be8c5ed4deba..6b029a19d425 100644 --- a/chia/_tests/util/run_block.py +++ b/chia/_tests/util/run_block.py @@ -3,7 +3,7 @@ import json from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict, List, Tuple +from typing import Any from chia_rs import Coin @@ -27,7 +27,7 @@ class NPC: coin_name: bytes32 puzzle_hash: bytes32 - conditions: List[Tuple[ConditionOpcode, List[ConditionWithArgs]]] + conditions: list[tuple[ConditionOpcode, list[ConditionWithArgs]]] @dataclass @@ -36,23 +36,23 @@ class CAT: memo: str npc: NPC - def cat_to_dict(self) -> Dict[str, Any]: + def cat_to_dict(self) -> dict[str, Any]: return {"asset_id": self.asset_id, "memo": self.memo, "npc": npc_to_dict(self.npc)} -def condition_with_args_to_dict(condition_with_args: ConditionWithArgs) -> Dict[str, Any]: +def condition_with_args_to_dict(condition_with_args: ConditionWithArgs) -> dict[str, Any]: return { "condition_opcode": condition_with_args.opcode.name, "arguments": [arg.hex() for arg in condition_with_args.vars], } -def condition_list_to_dict(condition_list: Tuple[ConditionOpcode, List[ConditionWithArgs]]) -> List[Dict[str, Any]]: +def condition_list_to_dict(condition_list: tuple[ConditionOpcode, list[ConditionWithArgs]]) -> list[dict[str, Any]]: assert all([condition_list[0] == cwa.opcode for cwa in condition_list[1]]) return [condition_with_args_to_dict(cwa) for cwa in condition_list[1]] -def npc_to_dict(npc: NPC) -> Dict[str, Any]: +def npc_to_dict(npc: NPC) -> dict[str, Any]: return { "coin_name": npc.coin_name.hex(), "conditions": [{"condition_type": c[0].name, "conditions": condition_list_to_dict(c)} for c in npc.conditions], @@ -60,13 +60,13 @@ def npc_to_dict(npc: NPC) -> Dict[str, Any]: } -def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants, max_cost: int) -> List[CAT]: +def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants, max_cost: int) -> list[CAT]: block_args = block_generator.generator_refs cost, block_result = block_generator.program.run_with_cost(max_cost, [DESERIALIZE_MOD, block_args]) coin_spends = block_result.first() - cat_list: List[CAT] = [] + cat_list: list[CAT] = [] for spend in coin_spends.as_iter(): parent, puzzle, amount, solution = spend.as_iter() args = match_cat_puzzle(uncurry_puzzle(puzzle)) @@ -79,7 +79,7 @@ def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants puzzle_result = puzzle.run(solution) - conds: Dict[ConditionOpcode, List[ConditionWithArgs]] = {} + conds: dict[ConditionOpcode, list[ConditionWithArgs]] = {} for condition in puzzle_result.as_python(): op = ConditionOpcode(condition[0]) @@ -126,7 +126,7 @@ def run_generator(block_generator: BlockGenerator, constants: ConsensusConstants 
return cat_list -def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[bytes]: +def ref_list_to_args(ref_list: list[uint32], root_path: Path) -> list[bytes]: args = [] for height in ref_list: with open(root_path / f"{height}.json", "rb") as f: @@ -139,10 +139,10 @@ def ref_list_to_args(ref_list: List[uint32], root_path: Path) -> List[bytes]: def run_generator_with_args( generator_program_hex: str, - generator_args: List[bytes], + generator_args: list[bytes], constants: ConsensusConstants, cost: uint64, -) -> List[CAT]: +) -> list[CAT]: if not generator_program_hex: return [] generator_program = SerializedProgram.fromhex(generator_program_hex) @@ -150,11 +150,11 @@ def run_generator_with_args( return run_generator(block_generator, constants, min(constants.MAX_BLOCK_COST_CLVM, cost)) -def run_json_block(full_block: Dict[str, Any], parent: Path, constants: ConsensusConstants) -> List[CAT]: +def run_json_block(full_block: dict[str, Any], parent: Path, constants: ConsensusConstants) -> list[CAT]: ref_list = full_block["block"]["transactions_generator_ref_list"] - tx_info: Dict[str, Any] = full_block["block"]["transactions_info"] + tx_info: dict[str, Any] = full_block["block"]["transactions_info"] generator_program_hex: str = full_block["block"]["transactions_generator"] - cat_list: List[CAT] = [] + cat_list: list[CAT] = [] if tx_info and generator_program_hex: cost = tx_info["cost"] args = ref_list_to_args(ref_list, parent) diff --git a/chia/_tests/util/setup_nodes.py b/chia/_tests/util/setup_nodes.py index e1feceb6979c..de4c2cf2bbad 100644 --- a/chia/_tests/util/setup_nodes.py +++ b/chia/_tests/util/setup_nodes.py @@ -3,10 +3,11 @@ import asyncio import contextlib import logging +from collections.abc import AsyncIterator from contextlib import AsyncExitStack, ExitStack, asynccontextmanager from dataclasses import dataclass from pathlib import Path -from typing import AsyncIterator, Dict, List, Optional, Tuple, Union +from typing import Optional, Union import anyio @@ -46,8 +47,8 @@ from chia.util.timing import adjusted_timeout, backoff_times from chia.wallet.wallet_node import WalletNode -OldSimulatorsAndWallets = Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools] -SimulatorsAndWalletsServices = Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools] +OldSimulatorsAndWallets = tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools] +SimulatorsAndWalletsServices = tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools] @dataclass(frozen=True) @@ -64,8 +65,8 @@ class FullSystem: @dataclass class SimulatorsAndWallets: - simulators: List[FullNodeEnvironment] - wallets: List[WalletEnvironment] + simulators: list[FullNodeEnvironment] + wallets: list[WalletEnvironment] bt: BlockTools @@ -79,7 +80,7 @@ def cleanup_keyring(keyring: TempKeyring) -> None: @asynccontextmanager async def setup_two_nodes( consensus_constants: ConsensusConstants, db_version: int, self_hostname: str -) -> AsyncIterator[Tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]]: +) -> AsyncIterator[tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]]: """ Setup and teardown of two full nodes, with blockchains and separate DBs. 
""" @@ -116,7 +117,7 @@ async def setup_two_nodes( @asynccontextmanager async def setup_n_nodes( consensus_constants: ConsensusConstants, n: int, db_version: int, self_hostname: str -) -> AsyncIterator[List[FullNodeAPI]]: +) -> AsyncIterator[list[FullNodeAPI]]: """ Setup and teardown of n full nodes, with blockchains and separate DBs. """ @@ -154,8 +155,8 @@ async def setup_simulators_and_wallets( key_seed: Optional[bytes32] = None, initial_num_public_keys: int = 5, db_version: int = 2, - config_overrides: Optional[Dict[str, int]] = None, - disable_capabilities: Optional[List[Capability]] = None, + config_overrides: Optional[dict[str, int]] = None, + disable_capabilities: Optional[list[Capability]] = None, ) -> AsyncIterator[SimulatorsAndWallets]: with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2: if config_overrides is None: @@ -175,7 +176,7 @@ async def setup_simulators_and_wallets( disable_capabilities, ) as (bt_tools, simulators, wallets_services): async with contextlib.AsyncExitStack() as exit_stack: - wallets: List[WalletEnvironment] = [] + wallets: list[WalletEnvironment] = [] for service in wallets_services: assert service.rpc_server is not None @@ -207,9 +208,9 @@ async def setup_simulators_and_wallets_service( key_seed: Optional[bytes32] = None, initial_num_public_keys: int = 5, db_version: int = 2, - config_overrides: Optional[Dict[str, int]] = None, - disable_capabilities: Optional[List[Capability]] = None, -) -> AsyncIterator[Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools]]: + config_overrides: Optional[dict[str, int]] = None, + disable_capabilities: Optional[list[Capability]] = None, +) -> AsyncIterator[tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools]]: with TempKeyring(populate=True) as keychain1, TempKeyring(populate=True) as keychain2: async with setup_simulators_and_wallets_inner( db_version, @@ -240,13 +241,13 @@ async def setup_simulators_and_wallets_inner( spam_filter_after_n_txs: int, wallet_count: int, xch_spam_amount: int, - config_overrides: Optional[Dict[str, int]], - disable_capabilities: Optional[List[Capability]], -) -> AsyncIterator[Tuple[List[BlockTools], List[SimulatorFullNodeService], List[WalletService]]]: + config_overrides: Optional[dict[str, int]], + disable_capabilities: Optional[list[Capability]], +) -> AsyncIterator[tuple[list[BlockTools], list[SimulatorFullNodeService], list[WalletService]]]: if config_overrides is not None and "full_node.max_sync_wait" not in config_overrides: config_overrides["full_node.max_sync_wait"] = 0 async with AsyncExitStack() as async_exit_stack: - bt_tools: List[BlockTools] = [ + bt_tools: list[BlockTools] = [ await create_block_tools_async(consensus_constants, keychain=keychain1, config_overrides=config_overrides) for _ in range(0, simulator_count) ] @@ -258,7 +259,7 @@ async def setup_simulators_and_wallets_inner( ) ) - simulators: List[SimulatorFullNodeService] = [ + simulators: list[SimulatorFullNodeService] = [ await async_exit_stack.enter_async_context( # Passing simulator=True gets us this type guaranteed setup_full_node( # type: ignore[arg-type] @@ -274,7 +275,7 @@ async def setup_simulators_and_wallets_inner( for index in range(0, simulator_count) ] - wallets: List[WalletService] = [ + wallets: list[WalletService] = [ await async_exit_stack.enter_async_context( setup_wallet_node( bt_tools[index].config["self_hostname"], @@ -301,7 +302,7 @@ async def setup_farmer_multi_harvester( consensus_constants: ConsensusConstants, *, 
start_services: bool, -) -> AsyncIterator[Tuple[List[HarvesterService], FarmerService, BlockTools]]: +) -> AsyncIterator[tuple[list[HarvesterService], FarmerService, BlockTools]]: async with AsyncExitStack() as async_exit_stack: farmer_service = await async_exit_stack.enter_async_context( setup_farmer( diff --git a/chia/_tests/util/split_managers.py b/chia/_tests/util/split_managers.py index 36457c0f45d9..75a44d0f82e2 100644 --- a/chia/_tests/util/split_managers.py +++ b/chia/_tests/util/split_managers.py @@ -1,8 +1,9 @@ from __future__ import annotations import contextlib +from collections.abc import AsyncIterator, Iterator from dataclasses import dataclass -from typing import AsyncContextManager, AsyncIterator, ContextManager, Generic, Iterator, List, TypeVar +from typing import AsyncContextManager, ContextManager, Generic, TypeVar T = TypeVar("T") @@ -16,7 +17,7 @@ class SplitManager(Generic[T]): _exited: bool = False def enter(self) -> None: - messages: List[str] = [] + messages: list[str] = [] if self._entered: messages.append("already entered") if self._exited: @@ -31,7 +32,7 @@ def exit(self, if_needed: bool = False) -> None: if if_needed and (not self._entered or self._exited): return - messages: List[str] = [] + messages: list[str] = [] if not self._entered: messages.append("not yet entered") if self._exited: @@ -52,7 +53,7 @@ class SplitAsyncManager(Generic[T]): _exited: bool = False async def enter(self) -> None: - messages: List[str] = [] + messages: list[str] = [] if self._entered: messages.append("already entered") if self._exited: @@ -67,7 +68,7 @@ async def exit(self, if_needed: bool = False) -> None: if if_needed and (not self._entered or self._exited): return - messages: List[str] = [] + messages: list[str] = [] if not self._entered: messages.append("not yet entered") if self._exited: diff --git a/chia/_tests/util/temp_file.py b/chia/_tests/util/temp_file.py index cc28d04370bd..044c7edd5db5 100644 --- a/chia/_tests/util/temp_file.py +++ b/chia/_tests/util/temp_file.py @@ -2,8 +2,8 @@ import contextlib import tempfile +from collections.abc import Iterator from pathlib import Path -from typing import Iterator @contextlib.contextmanager diff --git a/chia/_tests/util/test_action_scope.py b/chia/_tests/util/test_action_scope.py index 29c7f2446c80..789c34f503d0 100644 --- a/chia/_tests/util/test_action_scope.py +++ b/chia/_tests/util/test_action_scope.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import AsyncIterator from dataclasses import dataclass -from typing import AsyncIterator, final +from typing import final import pytest diff --git a/chia/_tests/util/test_async_pool.py b/chia/_tests/util/test_async_pool.py index 9ce256838dfd..770308b03682 100644 --- a/chia/_tests/util/test_async_pool.py +++ b/chia/_tests/util/test_async_pool.py @@ -4,7 +4,7 @@ import random import re from dataclasses import dataclass -from typing import List, Optional, Tuple, Type +from typing import Optional import anyio import pytest @@ -110,7 +110,7 @@ async def worker( worker_async_callable=worker, target_worker_count=1, ): - results: List[int] = [] + results: list[int] = [] with anyio.fail_after(adjusted_timeout(10)): for _ in expected_results: @@ -194,7 +194,7 @@ async def worker( for input in inputs: await work_queue.put(input) - results: List[int] = [] + results: list[int] = [] with anyio.fail_after(adjusted_timeout(10)): for _ in inputs: @@ -228,7 +228,7 @@ async def worker( for job in jobs: await work_queue.put(job) - results: List[int] = [] + results: list[int] 
= [] with anyio.fail_after(adjusted_timeout(10)): for _ in expected_results: @@ -266,7 +266,7 @@ async def worker( await work_queue.put(job) - results: List[int] = [] + results: list[int] = [] with anyio.fail_after(adjusted_timeout(10)): for _ in expected_results: @@ -304,7 +304,7 @@ async def worker( for job in jobs: await work_queue.put(job) - results: List[int] = [] + results: list[int] = [] with anyio.fail_after(adjusted_timeout(10)): for job in jobs: await job.started.wait() @@ -346,7 +346,7 @@ async def worker( for job in jobs: await work_queue.put(job) - results: List[int] = [] + results: list[int] = [] with anyio.fail_after(adjusted_timeout(10)): for _ in expected_results: @@ -355,7 +355,7 @@ async def worker( raising_jobs = [job for job in jobs if job.input in raises] expected_exceptions = [(Exception, (job.input,)) for job in raising_jobs] - exceptions: List[Tuple[Type[BaseException], Tuple[int]]] = [] + exceptions: list[tuple[type[BaseException], tuple[int]]] = [] for job in raising_jobs: exception = job.exception assert isinstance(exception, BaseException) diff --git a/chia/_tests/util/test_build_job_matrix.py b/chia/_tests/util/test_build_job_matrix.py index cc2e14d43f60..f57477fd3b3d 100644 --- a/chia/_tests/util/test_build_job_matrix.py +++ b/chia/_tests/util/test_build_job_matrix.py @@ -4,14 +4,13 @@ import pathlib import subprocess import sys -from typing import Dict, List import chia._tests build_job_matrix_path = pathlib.Path(chia._tests.__file__).with_name("build-job-matrix.py") -def run(args: List[str]) -> str: +def run(args: list[str]) -> str: completed_process = subprocess.run( [sys.executable, build_job_matrix_path, *args], check=True, @@ -22,7 +21,7 @@ def run(args: List[str]) -> str: def test() -> None: - timeouts: Dict[int, Dict[str, int]] = {} + timeouts: dict[int, dict[str, int]] = {} multipliers = [1, 2, 3] diff --git a/chia/_tests/util/test_condition_tools.py b/chia/_tests/util/test_condition_tools.py index 1a939c38baf7..b788e7b98251 100644 --- a/chia/_tests/util/test_condition_tools.py +++ b/chia/_tests/util/test_condition_tools.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import pytest from chia_rs import G1Element from clvm.casts import int_to_bytes @@ -29,8 +27,8 @@ def mk_agg_sig_conditions( opcode: ConditionOpcode, - agg_sig_data: List[Tuple[G1Element, bytes]], - agg_sig_unsafe_data: List[Tuple[G1Element, bytes]] = [], + agg_sig_data: list[tuple[G1Element, bytes]], + agg_sig_unsafe_data: list[tuple[G1Element, bytes]] = [], ) -> SpendBundleConditions: spend = SpendConditions( coin_id=TEST_COIN.name(), diff --git a/chia/_tests/util/test_config.py b/chia/_tests/util/test_config.py index fe54c1e5d922..864a987ce353 100644 --- a/chia/_tests/util/test_config.py +++ b/chia/_tests/util/test_config.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Dict, Optional, Set +from typing import Any, Optional from chia._tests.util.misc import DataCase, Marks, datacases from chia.server.outbound_message import NodeType @@ -13,9 +13,9 @@ @dataclass class GetUnresolvedPeerInfosCase(DataCase): description: str - service_config: Dict[str, Any] + service_config: dict[str, Any] requested_node_type: NodeType - expected_peer_infos: Set[UnresolvedPeerInfo] + expected_peer_infos: set[UnresolvedPeerInfo] marks: Marks = () @property @@ -154,9 +154,9 @@ def test_get_unresolved_peer_infos(case: GetUnresolvedPeerInfosCase) -> None: @dataclass class SetPeerInfoCase(DataCase): 
description: str - service_config: Dict[str, Any] + service_config: dict[str, Any] requested_node_type: NodeType - expected_service_config: Dict[str, Any] + expected_service_config: dict[str, Any] peer_host: Optional[str] = None peer_port: Optional[int] = None marks: Marks = () diff --git a/chia/_tests/util/test_dump_keyring.py b/chia/_tests/util/test_dump_keyring.py index f439fa7a645b..fafed48035de 100644 --- a/chia/_tests/util/test_dump_keyring.py +++ b/chia/_tests/util/test_dump_keyring.py @@ -2,8 +2,8 @@ import os import re +from collections.abc import Sequence from dataclasses import dataclass -from typing import Sequence from click.testing import CliRunner diff --git a/chia/_tests/util/test_full_block_utils.py b/chia/_tests/util/test_full_block_utils.py index 05e6c365b92a..b50f0e8d6f1c 100644 --- a/chia/_tests/util/test_full_block_utils.py +++ b/chia/_tests/util/test_full_block_utils.py @@ -1,7 +1,8 @@ from __future__ import annotations import random -from typing import Generator, Iterator, List, Optional +from collections.abc import Generator, Iterator +from typing import Optional import pytest from chia_rs import G1Element, G2Element @@ -27,11 +28,11 @@ from chia.util.generator_tools import get_block_header from chia.util.ints import uint8, uint32, uint64, uint128 -test_g2s: List[G2Element] = [rand_g2() for _ in range(10)] -test_g1s: List[G1Element] = [rand_g1() for _ in range(10)] -test_hashes: List[bytes32] = [rand_hash() for _ in range(100)] -test_vdfs: List[VDFInfo] = [rand_vdf() for _ in range(100)] -test_vdf_proofs: List[VDFProof] = [rand_vdf_proof() for _ in range(100)] +test_g2s: list[G2Element] = [rand_g2() for _ in range(10)] +test_g1s: list[G1Element] = [rand_g1() for _ in range(10)] +test_hashes: list[bytes32] = [rand_hash() for _ in range(100)] +test_vdfs: list[VDFInfo] = [rand_vdf() for _ in range(100)] +test_vdf_proofs: list[VDFProof] = [rand_vdf_proof() for _ in range(100)] def g2() -> G2Element: @@ -200,12 +201,12 @@ def get_end_of_sub_slot() -> Generator[EndOfSubSlotBundle, None, None]: ) -def get_finished_sub_slots() -> Generator[List[EndOfSubSlotBundle], None, None]: +def get_finished_sub_slots() -> Generator[list[EndOfSubSlotBundle], None, None]: yield [] yield [s for s in get_end_of_sub_slot()] -def get_ref_list() -> Generator[List[uint32], None, None]: +def get_ref_list() -> Generator[list[uint32], None, None]: yield [] yield [uint32(1), uint32(2), uint32(3), uint32(4)] yield [uint32(256)] diff --git a/chia/_tests/util/test_misc.py b/chia/_tests/util/test_misc.py index 2b4e265f5fac..2a132e85709a 100644 --- a/chia/_tests/util/test_misc.py +++ b/chia/_tests/util/test_misc.py @@ -1,7 +1,8 @@ from __future__ import annotations import contextlib -from typing import AsyncIterator, Iterator, List, Optional, Tuple, Type, TypeVar +from collections.abc import AsyncIterator, Iterator +from typing import Optional, TypeVar import aiohttp import anyio @@ -89,7 +90,7 @@ def test_validate_directory_writable(tmp_path) -> None: def test_empty_lists() -> None: # An empty list should return an empty iterator and skip the loop's body. 
- empty: List[int] = [] + empty: list[int] = [] with pytest.raises(StopIteration): next(to_batches(empty, 1)) @@ -129,14 +130,14 @@ def test_invalid_input_type() -> None: @contextlib.contextmanager -def sync_manager(y: List[str]) -> Iterator[None]: +def sync_manager(y: list[str]) -> Iterator[None]: y.append("entered") yield y.append("exited") def test_split_manager_class_works() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitManager(manager=sync_manager(y=x), object=None) assert x == [] @@ -149,7 +150,7 @@ def test_split_manager_class_works() -> None: def test_split_manager_function_exits_if_needed() -> None: - x: List[str] = [] + x: list[str] = [] with split_manager(manager=sync_manager(y=x), object=None) as split: assert x == [] @@ -161,7 +162,7 @@ def test_split_manager_function_exits_if_needed() -> None: def test_split_manager_function_skips_if_not_needed() -> None: - x: List[str] = [] + x: list[str] = [] with split_manager(manager=sync_manager(y=x), object=None) as split: assert x == [] @@ -176,7 +177,7 @@ def test_split_manager_function_skips_if_not_needed() -> None: def test_split_manager_raises_on_second_entry() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitManager(manager=sync_manager(y=x), object=None) split.enter() @@ -186,7 +187,7 @@ def test_split_manager_raises_on_second_entry() -> None: def test_split_manager_raises_on_second_entry_after_exiting() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitManager(manager=sync_manager(y=x), object=None) split.enter() @@ -197,7 +198,7 @@ def test_split_manager_raises_on_second_entry_after_exiting() -> None: def test_split_manager_raises_on_second_exit() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitManager(manager=sync_manager(y=x), object=None) split.enter() @@ -208,7 +209,7 @@ def test_split_manager_raises_on_second_exit() -> None: def test_split_manager_raises_on_exit_without_entry() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitManager(manager=sync_manager(y=x), object=None) @@ -217,7 +218,7 @@ def test_split_manager_raises_on_exit_without_entry() -> None: @contextlib.asynccontextmanager -async def async_manager(y: List[str]) -> AsyncIterator[None]: +async def async_manager(y: list[str]) -> AsyncIterator[None]: y.append("entered") yield y.append("exited") @@ -225,7 +226,7 @@ async def async_manager(y: List[str]) -> AsyncIterator[None]: @pytest.mark.anyio async def test_split_async_manager_class_works() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitAsyncManager(manager=async_manager(y=x), object=None) assert x == [] @@ -239,7 +240,7 @@ async def test_split_async_manager_class_works() -> None: @pytest.mark.anyio async def test_split_async_manager_function_exits_if_needed() -> None: - x: List[str] = [] + x: list[str] = [] async with split_async_manager(manager=async_manager(y=x), object=None) as split: assert x == [] @@ -252,7 +253,7 @@ async def test_split_async_manager_function_exits_if_needed() -> None: @pytest.mark.anyio async def test_split_async_manager_function_skips_if_not_needed() -> None: - x: List[str] = [] + x: list[str] = [] async with split_async_manager(manager=async_manager(y=x), object=None) as split: assert x == [] @@ -268,7 +269,7 @@ async def test_split_async_manager_function_skips_if_not_needed() -> None: @pytest.mark.anyio async def test_split_async_manager_raises_on_second_entry() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitAsyncManager(manager=async_manager(y=x), object=None) await split.enter() @@ 
-279,7 +280,7 @@ async def test_split_async_manager_raises_on_second_entry() -> None: @pytest.mark.anyio async def test_split_async_manager_raises_on_second_entry_after_exiting() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitAsyncManager(manager=async_manager(y=x), object=None) await split.enter() @@ -291,7 +292,7 @@ async def test_split_async_manager_raises_on_second_entry_after_exiting() -> Non @pytest.mark.anyio async def test_split_async_manager_raises_on_second_exit() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitAsyncManager(manager=async_manager(y=x), object=None) await split.enter() @@ -303,7 +304,7 @@ async def test_split_async_manager_raises_on_second_exit() -> None: @pytest.mark.anyio async def test_split_async_manager_raises_on_exit_without_entry() -> None: - x: List[str] = [] + x: list[str] = [] split = SplitAsyncManager(manager=async_manager(y=x), object=None) @@ -365,7 +366,7 @@ async def test_valued_event_wait_blocks_when_not_set() -> None: @pytest.mark.anyio async def test_valued_event_multiple_waits_all_get_values() -> None: - results: List[int] = [] + results: list[int] = [] valued_event = ValuedEvent[int]() value = 37 task_count = 10 @@ -430,15 +431,15 @@ async def test_recording_web_server_specified_response( (42, uint64, False), (uint64(42), uint64, True), ("42", int, False), - ([4, 2], List[int], True), - ([4, "2"], List[int], False), - ((4, 2), Tuple[int, int], True), - ((4, "2"), Tuple[int, int], False), - ((4, 2), Tuple[int, ...], True), - ((4, "2"), Tuple[int, ...], False), - ([(4, Program.to([2]))], List[Tuple[int, Program]], True), - ([(4, "2")], Tuple[int, str], False), + ([4, 2], list[int], True), + ([4, "2"], list[int], False), + ((4, 2), tuple[int, int], True), + ((4, "2"), tuple[int, int], False), + ((4, 2), tuple[int, ...], True), + ((4, "2"), tuple[int, ...], False), + ([(4, Program.to([2]))], list[tuple[int, Program]], True), + ([(4, "2")], tuple[int, str], False), ], ) -def test_satisfies_hint(obj: T, type_hint: Type[T], expected_result: bool) -> None: +def test_satisfies_hint(obj: T, type_hint: type[T], expected_result: bool) -> None: assert satisfies_hint(obj, type_hint) == expected_result diff --git a/chia/_tests/util/test_network.py b/chia/_tests/util/test_network.py index 69aebe6cb15e..804920b4796a 100644 --- a/chia/_tests/util/test_network.py +++ b/chia/_tests/util/test_network.py @@ -3,7 +3,7 @@ import os import sys from ipaddress import IPv4Address, IPv6Address -from typing import Type, Union +from typing import Union import pytest @@ -49,7 +49,7 @@ async def test_resolve6(): ("93.184.216.34", IPv4Address), ], ) -def test_ip_address(address_string: str, expected_inner: Type[Union[IPv4Address, IPv6Address]]) -> None: +def test_ip_address(address_string: str, expected_inner: type[Union[IPv4Address, IPv6Address]]) -> None: inner = expected_inner(address_string) ip = IPAddress.create(address_string) # Helpers diff --git a/chia/_tests/util/test_network_protocol_files.py b/chia/_tests/util/test_network_protocol_files.py index 688152356ae9..5066af6e3750 100644 --- a/chia/_tests/util/test_network_protocol_files.py +++ b/chia/_tests/util/test_network_protocol_files.py @@ -3,14 +3,13 @@ from __future__ import annotations from pathlib import Path -from typing import Tuple from chia._tests.util.build_network_protocol_files import get_network_protocol_filename from chia._tests.util.network_protocol_data import * # noqa: F403 from chia._tests.util.protocol_messages_json import * # noqa: F403 -def parse_blob(input_bytes: bytes) 
-> Tuple[bytes, bytes]: +def parse_blob(input_bytes: bytes) -> tuple[bytes, bytes]: size_bytes = input_bytes[:4] input_bytes = input_bytes[4:] size = int.from_bytes(size_bytes, "big") diff --git a/chia/_tests/util/test_network_protocol_test.py b/chia/_tests/util/test_network_protocol_test.py index 766f80efd823..d0adb404e7da 100644 --- a/chia/_tests/util/test_network_protocol_test.py +++ b/chia/_tests/util/test_network_protocol_test.py @@ -3,7 +3,7 @@ import ast import inspect -from typing import Any, Dict, Set, cast +from typing import Any, cast from chia.protocols import ( farmer_protocol, @@ -21,7 +21,7 @@ # stays up to date. It's a test for the test -def types_in_module(mod: Any) -> Set[str]: +def types_in_module(mod: Any) -> set[str]: parsed = ast.parse(inspect.getsource(mod)) types = set() for line in parsed.body: @@ -50,7 +50,7 @@ def test_missing_messages_state_machine() -> None: def test_message_ids() -> None: parsed = ast.parse(inspect.getsource(protocol_message_types)) - message_ids: Dict[int, str] = {} + message_ids: dict[int, str] = {} for line in parsed.body: if not isinstance(line, ast.ClassDef) or line.name != "ProtocolMessageTypes": continue diff --git a/chia/_tests/util/test_paginator.py b/chia/_tests/util/test_paginator.py index 1277d41d29a2..936143659941 100644 --- a/chia/_tests/util/test_paginator.py +++ b/chia/_tests/util/test_paginator.py @@ -1,7 +1,6 @@ from __future__ import annotations from math import ceil -from typing import List, Type import pytest @@ -12,7 +11,7 @@ "source, page_size, page_size_limit", [([], 1, 1), ([1], 1, 2), ([1, 2], 2, 2), ([], 10, 100), ([1, 2, 10], 1000, 1000)], ) -def test_constructor_valid_inputs(source: List[int], page_size: int, page_size_limit: int) -> None: +def test_constructor_valid_inputs(source: list[int], page_size: int, page_size_limit: int) -> None: paginator: Paginator = Paginator.create(source, page_size, page_size_limit) assert paginator.page_size() == page_size assert paginator.page_count() == 1 @@ -29,7 +28,7 @@ def test_constructor_valid_inputs(source: List[int], page_size: int, page_size_l (1001, 1000, InvalidPageSizeError), ], ) -def test_constructor_invalid_inputs(page_size: int, page_size_limit: int, exception: Type[Exception]) -> None: +def test_constructor_invalid_inputs(page_size: int, page_size_limit: int, exception: type[Exception]) -> None: with pytest.raises(exception): Paginator.create([], page_size, page_size_limit) @@ -62,7 +61,7 @@ def test_page_count() -> None: (0, 10, 0, []), ], ) -def test_get_page_valid(length: int, page: int, page_size: int, expected_data: List[int]) -> None: +def test_get_page_valid(length: int, page: int, page_size: int, expected_data: list[int]) -> None: assert Paginator.create(list(range(0, length)), page_size).get_page(page) == expected_data diff --git a/chia/_tests/util/test_priority_mutex.py b/chia/_tests/util/test_priority_mutex.py index d93ecb44f6f6..b2169e9b92c3 100644 --- a/chia/_tests/util/test_priority_mutex.py +++ b/chia/_tests/util/test_priority_mutex.py @@ -8,7 +8,7 @@ import random import time from dataclasses import dataclass -from typing import Callable, List, Optional +from typing import Callable, Optional import anyio import pytest @@ -158,8 +158,8 @@ def before(self, other: Request) -> bool: @dataclass(frozen=True) class OrderCase: - requests: List[Request] - expected_acquisitions: List[str] + requests: list[Request] + expected_acquisitions: list[str] @dataclass @@ -268,7 +268,7 @@ async def test_order(case: OrderCase) -> None: assert 
sane(requests=case.requests) -def expected_acquisition_order(requests: List[Request]) -> List[Request]: +def expected_acquisition_order(requests: list[Request]) -> list[Request]: first_request, *other_requests = requests return [ first_request, @@ -375,7 +375,7 @@ async def test_retains_request_order_for_matching_priority(seed: int) -> None: assert sane(requests=all_requests) -def sane(requests: List[Request]) -> bool: +def sane(requests: list[Request]) -> bool: if any(not request.completed for request in requests): return False @@ -387,7 +387,7 @@ def sane(requests: List[Request]) -> bool: class SaneCase: id: str good: bool - requests: List[Request] + requests: list[Request] marks: Marks = () @@ -434,10 +434,10 @@ def test_sane_all_in_order(case: SaneCase) -> None: async def create_acquire_tasks_in_controlled_order( - requests: List[Request], + requests: list[Request], mutex: PriorityMutex[MutexPriority], -) -> List[asyncio.Task[None]]: - tasks: List[asyncio.Task[None]] = [] +) -> list[asyncio.Task[None]]: + tasks: list[asyncio.Task[None]] = [] release_event = asyncio.Event() for request in requests: diff --git a/chia/_tests/util/test_recursive_replace.py b/chia/_tests/util/test_recursive_replace.py index 2bbd492c29ae..dc20bc82beec 100644 --- a/chia/_tests/util/test_recursive_replace.py +++ b/chia/_tests/util/test_recursive_replace.py @@ -2,7 +2,7 @@ import copy from dataclasses import dataclass -from typing import List, Optional, Union +from typing import Optional, Union import pytest @@ -35,7 +35,7 @@ def replace(self, **kwargs: Union[int, str, Optional[TestA]]) -> TestC: class TestA: a: int b: str - c: List[int] + c: list[int] d: Optional[TestC] diff --git a/chia/_tests/util/test_testnet_overrides.py b/chia/_tests/util/test_testnet_overrides.py index 323d8c843789..6b1fc9db99c0 100644 --- a/chia/_tests/util/test_testnet_overrides.py +++ b/chia/_tests/util/test_testnet_overrides.py @@ -1,12 +1,12 @@ from __future__ import annotations -from typing import Any, Dict +from typing import Any from chia.consensus.default_constants import update_testnet_overrides def test_testnet11() -> None: - overrides: Dict[str, Any] = {} + overrides: dict[str, Any] = {} update_testnet_overrides("testnet11", overrides) assert overrides == { "SOFT_FORK5_HEIGHT": 1340000, @@ -14,6 +14,6 @@ def test_testnet11() -> None: def test_mainnet() -> None: - overrides: Dict[str, Any] = {} + overrides: dict[str, Any] = {} update_testnet_overrides("mainnet", overrides) assert overrides == {} diff --git a/chia/_tests/util/test_timing.py b/chia/_tests/util/test_timing.py index d05939aa62b0..5ad38348d357 100644 --- a/chia/_tests/util/test_timing.py +++ b/chia/_tests/util/test_timing.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Iterator +from collections.abc import Iterator from chia.util.timing import backoff_times diff --git a/chia/_tests/util/test_trusted_peer.py b/chia/_tests/util/test_trusted_peer.py index 2d88feb721e0..9bafd15fb6b4 100644 --- a/chia/_tests/util/test_trusted_peer.py +++ b/chia/_tests/util/test_trusted_peer.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List +from typing import Any import pytest @@ -41,7 +41,7 @@ ], ) def test_is_trusted_peer( - host: str, node_id: bytes32, trusted_peers: Dict[str, Any], trusted_cidrs: List[str], testing: bool, result: bool + host: str, node_id: bytes32, trusted_peers: dict[str, Any], trusted_cidrs: list[str], testing: bool, result: bool ) -> None: assert ( is_trusted_peer( diff --git 
a/chia/_tests/util/time_out_assert.py b/chia/_tests/util/time_out_assert.py index 644f6d9b97cc..d2cf5a6945f2 100644 --- a/chia/_tests/util/time_out_assert.py +++ b/chia/_tests/util/time_out_assert.py @@ -6,7 +6,7 @@ import logging import pathlib import time -from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, Tuple, cast, final +from typing import TYPE_CHECKING, Any, Callable, ClassVar, cast, final import chia import chia._tests @@ -34,10 +34,10 @@ class TimeOutAssertData: label: str = "" - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () @classmethod - def unmarshal(cls, marshalled: Dict[str, Any]) -> TimeOutAssertData: + def unmarshal(cls, marshalled: dict[str, Any]) -> TimeOutAssertData: return cls( duration=marshalled["duration"], path=pathlib.Path(marshalled["path"]), @@ -46,7 +46,7 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> TimeOutAssertData: timed_out=marshalled["timed_out"], ) - def marshal(self) -> Dict[str, Any]: + def marshal(self) -> dict[str, Any]: return { "duration": self.duration, "path": self.path.as_posix(), diff --git a/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py b/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py index 4ea5c0d44f02..702fccd24a12 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional import pytest from chia_rs import AugSchemeMPL, G2Element, PrivateKey @@ -35,15 +35,15 @@ async def do_spend( sim: SpendSim, sim_client: SimClient, tail: Program, - coins: List[Coin], - lineage_proofs: List[LineageProof], - inner_solutions: List[Program], - expected_result: Tuple[MempoolInclusionStatus, Optional[Err]], + coins: list[Coin], + lineage_proofs: list[LineageProof], + inner_solutions: list[Program], + expected_result: tuple[MempoolInclusionStatus, Optional[Err]], reveal_limitations_program: bool = True, - signatures: List[G2Element] = [], - extra_deltas: Optional[List[int]] = None, - additional_spends: List[WalletSpendBundle] = [], - limitations_solutions: Optional[List[Program]] = None, + signatures: list[G2Element] = [], + extra_deltas: Optional[list[int]] = None, + additional_spends: list[WalletSpendBundle] = [], + limitations_solutions: Optional[list[Program]] = None, cost_logger: Optional[CostLogger] = None, cost_log_msg: str = "", ) -> int: @@ -52,7 +52,7 @@ async def do_spend( if extra_deltas is None: extra_deltas = [0] * len(coins) - spendable_cat_list: List[SpendableCAT] = [] + spendable_cat_list: list[SpendableCAT] = [] for coin, innersol, proof, limitations_solution, extra_delta in zip( coins, inner_solutions, lineage_proofs, limitations_solutions, extra_deltas ): diff --git a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py index 7e93494a327e..c4648f791e9c 100644 --- a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +++ b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Any, Optional import pytest from chia_rs import G2Element @@ -45,8 +45,8 @@ def str_to_cat_hash(tail_str: str) -> bytes32: # This method takes a dictionary of strings mapping to amounts and generates the appropriate CAT/XCH coins async def generate_coins( - sim: SpendSim, sim_client: SimClient, requested_coins: Dict[Optional[str], 
List[int]] -) -> Dict[Optional[str], List[Coin]]: + sim: SpendSim, sim_client: SimClient, requested_coins: dict[Optional[str], list[int]] +) -> dict[Optional[str], list[Coin]]: await sim.farm_block(acs_ph) parent_coin = [cr.coin for cr in await sim_client.get_coin_records_by_puzzle_hash(acs_ph)][0] @@ -87,7 +87,7 @@ async def generate_coins( await sim.farm_block() # Search for all of the coins and put them into a dictionary - coin_dict: Dict[Optional[str], List[Coin]] = {} + coin_dict: dict[Optional[str], list[Coin]] = {} for tail_str, _ in requested_coins.items(): if tail_str: tail_hash = str_to_tail_hash(tail_str) @@ -112,15 +112,15 @@ async def generate_coins( # `generate_secure_bundle` simulates a wallet's `generate_signed_transaction` # but doesn't bother with non-offer announcements def generate_secure_bundle( - selected_coins: List[Coin], - announcements: List[AssertPuzzleAnnouncement], + selected_coins: list[Coin], + announcements: list[AssertPuzzleAnnouncement], offered_amount: uint64, tail_str: Optional[str] = None, ) -> WalletSpendBundle: announcement_assertions = [a.to_program() for a in announcements] selected_coin_amount = sum(c.amount for c in selected_coins) non_primaries = [] if len(selected_coins) < 2 else selected_coins[1:] - inner_solution: List[Any] = [ + inner_solution: list[Any] = [ [51, Offer.ph(), offered_amount], # Offered coin [51, acs_ph, uint64(selected_coin_amount - offered_amount)], # Change *announcement_assertions, @@ -178,7 +178,7 @@ async def test_complex_offer(cost_logger: CostLogger) -> None: driver_dict_as_infos = {key.hex(): value.info for key, value in driver_dict.items()} # Create an XCH Offer for RED - chia_requested_payments: Dict[Optional[bytes32], List[Payment]] = { + chia_requested_payments: dict[Optional[bytes32], list[Payment]] = { str_to_tail_hash("red"): [Payment(acs_ph, uint64(100), [b"memo"]), Payment(acs_ph, uint64(200), [b"memo"])] } chia_notarized_payments = Offer.notarize_payments(chia_requested_payments, chia_coins) @@ -190,7 +190,7 @@ async def test_complex_offer(cost_logger: CostLogger) -> None: # Create a RED Offer for XCH red_coins_1 = red_coins[0:1] red_coins_2 = red_coins[1:] - red_requested_payments: Dict[Optional[bytes32], List[Payment]] = { + red_requested_payments: dict[Optional[bytes32], list[Payment]] = { None: [Payment(acs_ph, uint64(300), [b"red memo"]), Payment(acs_ph, uint64(350), [b"red memo"])] } red_notarized_payments = Offer.notarize_payments(red_requested_payments, red_coins_1) @@ -201,7 +201,7 @@ async def test_complex_offer(cost_logger: CostLogger) -> None: red_offer = Offer(red_notarized_payments, red_secured_bundle, driver_dict) assert not red_offer.is_valid() - red_requested_payments_2: Dict[Optional[bytes32], List[Payment]] = { + red_requested_payments_2: dict[Optional[bytes32], list[Payment]] = { None: [Payment(acs_ph, uint64(50), [b"red memo"])] } red_notarized_payments_2 = Offer.notarize_payments(red_requested_payments_2, red_coins_2) @@ -219,7 +219,7 @@ async def test_complex_offer(cost_logger: CostLogger) -> None: assert new_offer.is_valid() # Create yet another offer of BLUE for XCH and RED - blue_requested_payments: Dict[Optional[bytes32], List[Payment]] = { + blue_requested_payments: dict[Optional[bytes32], list[Payment]] = { None: [Payment(acs_ph, uint64(200), [b"blue memo"])], str_to_tail_hash("red"): [Payment(acs_ph, uint64(50), [b"blue memo"])], } diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index 74c09f96b8d6..6b38db26a031 100644 --- 
a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import Any, Dict, List, Union +from typing import Any, Union import pytest from chia_rs import G2Element @@ -36,7 +36,7 @@ from chia.wallet.wallet_node import WalletNode from chia.wallet.wallet_spend_bundle import WalletSpendBundle -OfferSummary = Dict[Union[int, bytes32], int] +OfferSummary = dict[Union[int, bytes32], int] async def get_trade_and_status(trade_manager: TradeManager, trade: TradeRecord) -> TradeStatus: @@ -166,7 +166,7 @@ async def test_cat_trades( tail_taker = Program.to([3, (1, "taker"), None, None]) proofs_checker_maker = ProofsChecker(["foo", "bar"]) proofs_checker_taker = ProofsChecker(["bar", "zap"]) - authorized_providers: List[bytes32] = [did_id_maker, did_id_taker] + authorized_providers: list[bytes32] = [did_id_maker, did_id_taker] cat_wallet_maker: CATWallet = await CRCATWallet.get_or_create_wallet_for_cat( wallet_node_maker.wallet_state_manager, wallet_maker, @@ -445,10 +445,10 @@ async def test_cat_trades( new_cat_wallet_maker.id(): 15, } - driver_dict: Dict[bytes32, PuzzleInfo] = {} + driver_dict: dict[bytes32, PuzzleInfo] = {} for wallet in (cat_wallet_maker, new_cat_wallet_maker): asset_id: str = wallet.get_asset_id() - driver_item: Dict[str, Any] = { + driver_item: dict[str, Any] = { "type": AssetType.CAT.value, "tail": "0x" + asset_id, } @@ -1876,8 +1876,8 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N total_spend = SpendBundle.aggregate( [tx.spend_bundle for tx in action_scope.side_effects.transactions if tx.spend_bundle is not None] ) - all_conditions: List[Program] = [] - creations: List[CreateCoinAnnouncement] = [] + all_conditions: list[Program] = [] + creations: list[CreateCoinAnnouncement] = [] announcement_nonce = std_hash(trade_make.trade_id) for spend in total_spend.coin_spends: all_conditions.extend( diff --git a/chia/_tests/wallet/clawback/test_clawback_decorator.py b/chia/_tests/wallet/clawback/test_clawback_decorator.py index 592c1d348f60..0e9f816a2bc3 100644 --- a/chia/_tests/wallet/clawback/test_clawback_decorator.py +++ b/chia/_tests/wallet/clawback/test_clawback_decorator.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import pytest from chia.server.server import ChiaServer @@ -19,7 +17,7 @@ ) @pytest.mark.anyio async def test_missing_decorator( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, self_hostname: str, ) -> None: @@ -39,7 +37,7 @@ async def test_missing_decorator( ) @pytest.mark.anyio async def test_unknown_decorator( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, self_hostname: str, ) -> None: @@ -59,7 +57,7 @@ async def test_unknown_decorator( ) @pytest.mark.anyio async def test_decorator( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, self_hostname: str, ) -> None: diff --git a/chia/_tests/wallet/clawback/test_clawback_lifecycle.py 
b/chia/_tests/wallet/clawback/test_clawback_lifecycle.py index 7a72508462b3..6e25a54bd049 100644 --- a/chia/_tests/wallet/clawback/test_clawback_lifecycle.py +++ b/chia/_tests/wallet/clawback/test_clawback_lifecycle.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, Optional, Tuple +from typing import Optional import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -48,7 +48,7 @@ async def do_spend( sim: SpendSim, sim_client: SimClient, spend_bundle: WalletSpendBundle, - expected_result: Tuple[MempoolInclusionStatus, Optional[Err]], + expected_result: tuple[MempoolInclusionStatus, Optional[Err]], cost_logger: Optional[CostLogger] = None, cost_log_msg: str = "", ) -> int: @@ -277,7 +277,7 @@ def test_clawback_puzzles(self) -> None: cb_sender_sol = create_merkle_solution(timelock, sender_ph, recipient_ph, sender_puz, sender_sol) conds = conditions_dict_for_solution(clawback_puz, cb_sender_sol, INFINITE_COST) - assert isinstance(conds, Dict) + assert isinstance(conds, dict) create_coins = conds[ConditionOpcode.CREATE_COIN] assert len(create_coins) == 1 assert create_coins[0].vars[0] == sender_ph @@ -286,7 +286,7 @@ def test_clawback_puzzles(self) -> None: cb_recipient_sol = create_merkle_solution(timelock, sender_ph, recipient_ph, recipient_puz, recipient_sol) clawback_puz.run(cb_recipient_sol) conds = conditions_dict_for_solution(clawback_puz, cb_recipient_sol, INFINITE_COST) - assert isinstance(conds, Dict) + assert isinstance(conds, dict) create_coins = conds[ConditionOpcode.CREATE_COIN] assert len(create_coins) == 1 assert create_coins[0].vars[0] == recipient_ph diff --git a/chia/_tests/wallet/clawback/test_clawback_metadata.py b/chia/_tests/wallet/clawback/test_clawback_metadata.py index 4bc178df1695..4bae7b19ab6e 100644 --- a/chia/_tests/wallet/clawback/test_clawback_metadata.py +++ b/chia/_tests/wallet/clawback/test_clawback_metadata.py @@ -1,7 +1,6 @@ from __future__ import annotations import random -from typing import List, Tuple import pytest @@ -21,7 +20,7 @@ ) @pytest.mark.anyio async def test_is_recipient( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, self_hostname: str, seeded_random: random.Random, diff --git a/chia/_tests/wallet/conftest.py b/chia/_tests/wallet/conftest.py index 5dbca35a79dd..3fbcce66e1da 100644 --- a/chia/_tests/wallet/conftest.py +++ b/chia/_tests/wallet/conftest.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import AsyncIterator, Awaitable from contextlib import AsyncExitStack from dataclasses import replace -from typing import Any, AsyncIterator, Awaitable, Callable, Dict, List, Literal, Optional, Tuple +from typing import Any, Callable, Literal, Optional import pytest @@ -55,7 +56,7 @@ async def ignore_block_validation( if "standard_block_tools" in request.keywords: return None - async def validate_block_body(*args: Any, **kwargs: Any) -> Tuple[Literal[None], NPCResult]: + async def validate_block_body(*args: Any, **kwargs: Any) -> tuple[Literal[None], NPCResult]: return None, args[7] def create_wrapper(original_create: Any) -> Any: @@ -128,7 +129,7 @@ async def wallet_environments( pytest.skip("Skipping not specified reuse_puzhash mode") assert len(request.param["blocks_needed"]) == request.param["num_environments"] if "config_overrides" in request.param: - config_overrides: Dict[str, Any] = 
request.param["config_overrides"] + config_overrides: dict[str, Any] = request.param["config_overrides"] else: # pragma: no cover config_overrides = {} async with setup_simulators_and_wallets_service( @@ -141,7 +142,7 @@ async def wallet_environments( full_node[0]._api.full_node.config = {**full_node[0]._api.full_node.config, **config_overrides} - wallet_rpc_clients: List[WalletRpcClient] = [] + wallet_rpc_clients: list[WalletRpcClient] = [] async with AsyncExitStack() as astack: for service in wallet_services: service._node.config = { @@ -171,7 +172,7 @@ async def wallet_environments( ) ) - wallet_states: List[WalletState] = [] + wallet_states: list[WalletState] = [] for service, blocks_needed in zip(wallet_services, request.param["blocks_needed"]): if blocks_needed > 0: await full_node[0]._api.farm_blocks_to_wallet( diff --git a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py index f31ab3bbd640..a7f5a6e4f92f 100644 --- a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py +++ b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, List, Optional, Tuple +from typing import Any, Optional import pytest from chia_rs import AugSchemeMPL @@ -542,7 +542,7 @@ def test_validator() -> None: spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash() parent_amt_list = [[parent_id, locked_amount]] - cat_parent_amt_list: List[Optional[Any]] = [] + cat_parent_amt_list: list[Optional[Any]] = [] spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner.get_tree_hash()]) output_conds = spend_p2_singleton.run(spend_p2_singleton_solution) @@ -721,7 +721,7 @@ def test_merge_p2_singleton() -> None: ] amounts = [uint64(1000), uint64(2000), uint64(3000)] parent_puzhash_amounts = [] - merge_coin_ids: List[bytes32] = [] + merge_coin_ids: list[bytes32] = [] for pid, amt in zip(parent_ids, amounts): parent_puzhash_amounts.append([pid, my_puzhash, amt]) merge_coin_ids.append(Coin(pid, my_puzhash, amt).name()) @@ -825,7 +825,7 @@ def test_treasury() -> None: spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash() parent_amt_list = [[parent_id, locked_amount]] - cat_parent_amt_list: List[Optional[Any]] = [] + cat_parent_amt_list: list[Optional[Any]] = [] spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner.get_tree_hash()]) proposal: Program = proposal_curry_one.curry( @@ -1072,7 +1072,7 @@ def test_proposal_lifecycle() -> None: spend_p2_singleton_puzhash = spend_p2_singleton.get_tree_hash() parent_amt_list = [[parent_id, locked_amount]] - cat_parent_amt_list: List[Optional[Any]] = [] + cat_parent_amt_list: list[Optional[Any]] = [] spend_p2_singleton_solution = Program.to([parent_amt_list, cat_parent_amt_list, treasury_inner_puzhash]) # Setup Proposal @@ -1242,10 +1242,10 @@ def test_proposal_lifecycle() -> None: async def do_spend( sim: SpendSim, sim_client: SimClient, - coins: List[Coin], - puzzles: List[Program], - solutions: List[Program], -) -> Tuple[MempoolInclusionStatus, Optional[Err]]: + coins: list[Coin], + puzzles: list[Program], + solutions: list[Program], +) -> tuple[MempoolInclusionStatus, Optional[Err]]: spends = [] for coin, puzzle, solution in zip(coins, puzzles, solutions): spends.append(make_spend(coin, puzzle, solution)) diff --git a/chia/_tests/wallet/dao_wallet/test_dao_wallets.py b/chia/_tests/wallet/dao_wallet/test_dao_wallets.py index 1261e9f9121b..790b4195c512 100644 --- 
a/chia/_tests/wallet/dao_wallet/test_dao_wallets.py +++ b/chia/_tests/wallet/dao_wallet/test_dao_wallets.py @@ -2,7 +2,7 @@ import asyncio import time -from typing import Any, Callable, Dict, List, Optional, Tuple, Union +from typing import Any, Callable, Optional, Union import pytest @@ -32,17 +32,17 @@ from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG -async def get_proposal_state(wallet: DAOWallet, index: int) -> Tuple[Optional[bool], Optional[bool]]: +async def get_proposal_state(wallet: DAOWallet, index: int) -> tuple[Optional[bool], Optional[bool]]: return wallet.dao_info.proposals_list[index].passed, wallet.dao_info.proposals_list[index].closed async def rpc_state( timeout: float, async_function: Callable[[Any], Any], - params: List[Union[int, Dict[str, Any]]], - condition_func: Callable[[Dict[str, Any]], Any], + params: list[Union[int, dict[str, Any]]], + condition_func: Callable[[dict[str, Any]], Any], result: Optional[Any] = None, -) -> Union[bool, Dict[str, Any]]: # pragma: no cover +) -> Union[bool, dict[str, Any]]: # pragma: no cover __tracebackhide__ = True timeout = adjusted_timeout(timeout=timeout) diff --git a/chia/_tests/wallet/db_wallet/test_db_graftroot.py b/chia/_tests/wallet/db_wallet/test_db_graftroot.py index f61a6fea0a07..edd4f40dc3ca 100644 --- a/chia/_tests/wallet/db_wallet/test_db_graftroot.py +++ b/chia/_tests/wallet/db_wallet/test_db_graftroot.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, List, Tuple - import pytest from chia_rs import G2Element @@ -42,11 +40,11 @@ async def test_graftroot(cost_logger: CostLogger) -> None: async with sim_and_client() as (sim, sim_client): # Create the coin we're testing - all_values: List[bytes32] = [bytes32([x] * 32) for x in range(0, 100)] + all_values: list[bytes32] = [bytes32([x] * 32) for x in range(0, 100)] root, proofs = build_merkle_tree(all_values) p2_conditions = Program.to((1, [[51, ACS_PH, 0]])) # A coin to create to make sure this hits the blockchain desired_key_values = ((bytes32([0] * 32), bytes32([1] * 32)), (bytes32([7] * 32), bytes32([8] * 32))) - desired_row_hashes: List[bytes32] = [build_merkle_tree_from_binary_tree(kv)[0] for kv in desired_key_values] + desired_row_hashes: list[bytes32] = [build_merkle_tree_from_binary_tree(kv)[0] for kv in desired_key_values] fake_struct: Program = Program.to((ACS_PH, NIL_PH)) graftroot_puzzle: Program = GRAFTROOT_MOD.curry( # Do everything twice to test depending on multiple singleton updates @@ -61,21 +59,21 @@ async def test_graftroot(cost_logger: CostLogger) -> None: ].coin # Build some merkle trees that won't satisfy the requirements - def filter_all(values: List[bytes32]) -> List[bytes32]: + def filter_all(values: list[bytes32]) -> list[bytes32]: return [h for i, h in enumerate(values) if (h, values[min(i, i + 1)]) not in desired_key_values] - def filter_to_only_one(values: List[bytes32]) -> List[bytes32]: + def filter_to_only_one(values: list[bytes32]) -> list[bytes32]: return [h for i, h in enumerate(values) if (h, values[min(i, i + 1)]) not in desired_key_values[1:]] # And one that will - def filter_none(values: List[bytes32]) -> List[bytes32]: + def filter_none(values: list[bytes32]) -> list[bytes32]: return values for list_filter in (filter_all, filter_to_only_one, filter_none): # Create the "singleton" filtered_values = list_filter(all_values) root, proofs = build_merkle_tree(filtered_values) - filtered_row_hashes: Dict[bytes32, Tuple[int, List[bytes32]]] = { + filtered_row_hashes: dict[bytes32, tuple[int,
list[bytes32]]] = { simplify_merkle_proof(v, (proofs[v][0], [proofs[v][1][0]])): (proofs[v][0] >> 1, proofs[v][1][1:]) for v in filtered_values } diff --git a/chia/_tests/wallet/db_wallet/test_dl_offers.py b/chia/_tests/wallet/db_wallet/test_dl_offers.py index 336d5e353c72..49fc5830f920 100644 --- a/chia/_tests/wallet/db_wallet/test_dl_offers.py +++ b/chia/_tests/wallet/db_wallet/test_dl_offers.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, List, Tuple +from typing import Any import pytest @@ -31,9 +31,9 @@ async def get_trade_and_status(trade_manager: Any, trade: TradeRecord) -> TradeS return TradeStatus(trade_rec.status) -def get_parent_branch(value: bytes32, proof: Tuple[int, List[bytes32]]) -> Tuple[bytes32, Tuple[int, List[bytes32]]]: +def get_parent_branch(value: bytes32, proof: tuple[int, list[bytes32]]) -> tuple[bytes32, tuple[int, list[bytes32]]]: branch: bytes32 = simplify_merkle_proof(value, (proof[0], [proof[1][0]])) - new_proof: Tuple[int, List[bytes32]] = (proof[0] >> 1, proof[1][1:]) + new_proof: tuple[int, list[bytes32]] = (proof[0] >> 1, proof[1][1:]) return branch, new_proof diff --git a/chia/_tests/wallet/db_wallet/test_dl_wallet.py b/chia/_tests/wallet/db_wallet/test_dl_wallet.py index d5c5b473784a..d8df878219ce 100644 --- a/chia/_tests/wallet/db_wallet/test_dl_wallet.py +++ b/chia/_tests/wallet/db_wallet/test_dl_wallet.py @@ -2,7 +2,7 @@ import asyncio import dataclasses -from typing import Any, List +from typing import Any import pytest @@ -591,7 +591,7 @@ async def test_mirrors(wallets_prefarm: Any, trusted: bool) -> None: await dl_wallet_1.create_new_mirror( launcher_id_2, uint64(3), [b"foo", b"bar"], action_scope, fee=uint64(1_999_999_999_999) ) - additions: List[Coin] = [] + additions: list[Coin] = [] for tx in action_scope.side_effects.transactions: if tx.spend_bundle is not None: additions.extend(tx.spend_bundle.additions()) diff --git a/chia/_tests/wallet/did_wallet/test_did.py b/chia/_tests/wallet/did_wallet/test_did.py index 7a405aab702c..a7f89ec0e80c 100644 --- a/chia/_tests/wallet/did_wallet/test_did.py +++ b/chia/_tests/wallet/did_wallet/test_did.py @@ -2,7 +2,6 @@ import dataclasses import json -from typing import List, Tuple import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -774,7 +773,7 @@ async def test_did_recovery_with_empty_set(self, wallet_environments: WalletTest ] ) coin = await did_wallet.get_coin() - info: List[Tuple[bytes, bytes, int]] = [] + info: list[tuple[bytes, bytes, int]] = [] pubkey = (await did_wallet.wallet_state_manager.get_unused_derivation_record(did_wallet.wallet_info.id)).pubkey with pytest.raises(Exception): # We expect a CLVM 80 error for this test async with did_wallet.wallet_state_manager.new_action_scope( @@ -1369,7 +1368,7 @@ async def test_did_transfer(self, wallet_environments: WalletTestFramework, with async def test_did_auto_transfer_limit( self, self_hostname: str, - two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + two_wallet_nodes: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, ) -> None: fee = uint64(1000) @@ -2136,7 +2135,7 @@ async def test_create_did_with_recovery_list( async def test_did_resync( self, self_hostname: str, - two_wallet_nodes: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + two_wallet_nodes: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, ) -> None: full_nodes, 
wallets, _ = two_wallet_nodes diff --git a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py index afeb783ac579..656d8f78c412 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py @@ -3,7 +3,8 @@ import asyncio import logging import random -from typing import Any, Callable, Coroutine, Optional, Tuple +from collections.abc import Coroutine +from typing import Any, Callable, Optional import pytest @@ -1104,7 +1105,7 @@ async def test_complex_nft_offer( self_hostname: str, two_wallet_nodes: Any, trusted: Any, - royalty_pts: Tuple[int, int, int], + royalty_pts: tuple[int, int, int], seeded_random: random.Random, ) -> None: """ diff --git a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py index f23b81a36ccc..e8f8d71f5f34 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py @@ -1,7 +1,6 @@ from __future__ import annotations import itertools -from typing import List import pytest from chia_rs import G2Element @@ -68,13 +67,13 @@ async def test_state_layer(cost_logger: CostLogger, metadata_updater: str) -> No await sim.farm_block() if metadata_updater == "default": - metadata_updater_solutions: List[Program] = [ + metadata_updater_solutions: list[Program] = [ Program.to((b"u", "update")), Program.to((b"lu", "update")), Program.to((b"mu", "update")), Program.to((b"foo", "update")), ] - expected_metadatas: List[Program] = [ + expected_metadatas: list[Program] = [ metadata_to_program( { b"u": ["update", "hey hey"], diff --git a/chia/_tests/wallet/nft_wallet/test_nft_offers.py b/chia/_tests/wallet/nft_wallet/test_nft_offers.py index 8660e1583090..5a1a4dea61a8 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_offers.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_offers.py @@ -1,7 +1,7 @@ from __future__ import annotations import random -from typing import Any, Dict, Optional +from typing import Any, Optional import pytest @@ -123,7 +123,7 @@ async def test_nft_offer_with_fee( nft_to_offer = coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) nft_asset_id: bytes32 = create_asset_id(nft_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} xch_request = 100 maker_fee = uint64(10) @@ -205,7 +205,7 @@ async def test_nft_offer_with_fee( nft_to_buy = coins_taker[0] nft_to_buy_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_buy.full_puzzle)) nft_to_buy_asset_id: bytes32 = create_asset_id(nft_to_buy_info) # type: ignore - driver_dict_to_buy: Dict[bytes32, Optional[PuzzleInfo]] = {nft_to_buy_asset_id: nft_to_buy_info} + driver_dict_to_buy: dict[bytes32, Optional[PuzzleInfo]] = {nft_to_buy_asset_id: nft_to_buy_info} xch_offered = 1000 maker_fee = uint64(10) @@ -327,7 +327,7 @@ async def test_nft_offer_cancellations( nft_to_offer = coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) nft_asset_id: bytes32 = create_asset_id(nft_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} xch_request = 100 maker_fee = uint64(10) @@ -472,7 +472,7 @@ async def test_nft_offer_with_metadata_update( nft_to_offer = 
coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) nft_asset_id: bytes32 = create_asset_id(nft_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} xch_request = 100 maker_fee = uint64(10) @@ -643,7 +643,7 @@ async def test_nft_offer_nft_for_cat( nft_to_offer = coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) nft_asset_id: bytes32 = create_asset_id(nft_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} maker_fee = uint64(10) taker_cat_offered = 2500 @@ -732,7 +732,7 @@ async def test_nft_offer_nft_for_cat( nft_to_buy_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_buy.full_puzzle)) nft_to_buy_asset_id: bytes32 = create_asset_id(nft_to_buy_info) # type: ignore - driver_dict_to_buy: Dict[bytes32, Optional[PuzzleInfo]] = { + driver_dict_to_buy: dict[bytes32, Optional[PuzzleInfo]] = { nft_to_buy_asset_id: nft_to_buy_info, } @@ -889,7 +889,7 @@ async def test_nft_offer_nft_for_nft( nft_to_take_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_take.full_puzzle)) nft_to_take_asset_id: bytes32 = create_asset_id(nft_to_take_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = { + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = { nft_to_offer_asset_id: nft_to_offer_info, nft_to_take_asset_id: nft_to_take_info, } @@ -1064,7 +1064,7 @@ async def test_nft_offer_nft0_and_xch_for_cat( nft_to_offer = coins_maker[0] nft_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_offer.full_puzzle)) nft_asset_id: bytes32 = create_asset_id(nft_info) # type: ignore - driver_dict: Dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} + driver_dict: dict[bytes32, Optional[PuzzleInfo]] = {nft_asset_id: nft_info} maker_fee = uint64(10) maker_xch_offered = 1000 @@ -1160,7 +1160,7 @@ async def test_nft_offer_nft0_and_xch_for_cat( nft_to_buy_info: Optional[PuzzleInfo] = match_puzzle(uncurry_puzzle(nft_to_buy.full_puzzle)) nft_to_buy_asset_id: bytes32 = create_asset_id(nft_to_buy_info) # type: ignore - driver_dict_to_buy: Dict[bytes32, Optional[PuzzleInfo]] = { + driver_dict_to_buy: dict[bytes32, Optional[PuzzleInfo]] = { nft_to_buy_asset_id: nft_to_buy_info, } diff --git a/chia/_tests/wallet/nft_wallet/test_nft_puzzles.py b/chia/_tests/wallet/nft_wallet/test_nft_puzzles.py index cc630149870e..4a7c40d7e7e3 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_puzzles.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_puzzles.py @@ -1,7 +1,7 @@ from __future__ import annotations import random -from typing import Optional, Tuple +from typing import Optional from chia._tests.core.make_block_generator import int_to_public_key from chia.types.blockchain_format.program import Program @@ -104,7 +104,7 @@ def test_nft_transfer_puzzle_hashes(seeded_random: random.Random) -> None: assert expected_ph == calculated_ph -def make_a_new_solution() -> Tuple[Program, Program]: +def make_a_new_solution() -> tuple[Program, Program]: destination = int_to_public_key(2) p2_puzzle = puzzle_for_pk(destination) puzhash = p2_puzzle.get_tree_hash() @@ -116,7 +116,7 @@ def make_a_new_solution() -> Tuple[Program, Program]: return p2_puzzle, solution -def make_a_new_ownership_layer_puzzle() -> Tuple[Program, Program]: +def 
make_a_new_ownership_layer_puzzle() -> tuple[Program, Program]: pubkey = int_to_public_key(1) innerpuz = puzzle_for_pk(pubkey) old_did = Program.to("test_2").get_tree_hash() diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index 5d1bc8464491..0ac39f547e61 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -2,7 +2,7 @@ import asyncio import time -from typing import Any, Callable, Dict, List +from typing import Any, Callable import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -49,9 +49,9 @@ async def get_wallet_number(manager: WalletStateManager) -> int: async def wait_rpc_state_condition( timeout: float, async_function: Any, - params: List[Any], - condition_func: Callable[[Dict[str, Any]], bool], -) -> Dict[str, Any]: + params: list[Any], + condition_func: Callable[[dict[str, Any]], bool], +) -> dict[str, Any]: __tracebackhide__ = True timeout = adjusted_timeout(timeout=timeout) @@ -595,7 +595,7 @@ async def test_nft_wallet_rpc_creation_and_list(wallet_environments: WalletTestF ["nft_get_nfts", dict(wallet_id=env.wallet_aliases["nft"])], lambda x: x["success"] and len(x["nft_list"]) == 2, ) - coins: List[NFTInfo] = [NFTInfo.from_json_dict(d) for d in coins_response["nft_list"]] + coins: list[NFTInfo] = [NFTInfo.from_json_dict(d) for d in coins_response["nft_list"]] uris = [] for coin in coins: assert not coin.supports_did @@ -670,7 +670,7 @@ async def test_nft_wallet_rpc_update_metadata(wallet_environments: WalletTestFra ] ) - coins: List[Dict[str, Any]] = (await env.rpc_client.list_nfts(nft_wallet.id(), start_index=0, num=1))["nft_list"] + coins: list[dict[str, Any]] = (await env.rpc_client.list_nfts(nft_wallet.id(), start_index=0, num=1))["nft_list"] coin = coins[0] assert coin["mint_height"] > 0 assert coin["data_hash"] == "0xd4584ad463139fa8c0d9f68f4b59f185" @@ -929,7 +929,7 @@ async def test_nft_with_did_wallet_creation(wallet_environments: WalletTestFrame ] ) # Check DID NFT - coins: List[Dict[str, Any]] = (await env.rpc_client.list_nfts(nft_wallet.id(), start_index=0, num=1))["nft_list"] + coins: list[dict[str, Any]] = (await env.rpc_client.list_nfts(nft_wallet.id(), start_index=0, num=1))["nft_list"] assert len(coins) == 1 did_nft = coins[0] assert did_nft["mint_height"] > 0 @@ -1062,7 +1062,7 @@ async def test_nft_rpc_mint(wallet_environments: WalletTestFramework) -> None: ] ) - coins: List[Dict[str, Any]] = ( + coins: list[dict[str, Any]] = ( await env.rpc_client.list_nfts(env.wallet_aliases["nft_w_did"], start_index=0, num=1) )["nft_list"] assert len(coins) == 1 @@ -1199,7 +1199,7 @@ async def test_nft_transfer_nft_with_did(wallet_environments: WalletTestFramewor ) # Check DID NFT - coins: List[Dict[str, Any]] = (await env_0.rpc_client.list_nfts(env_0.wallet_aliases["nft"], start_index=0, num=1))[ + coins: list[dict[str, Any]] = (await env_0.rpc_client.list_nfts(env_0.wallet_aliases["nft"], start_index=0, num=1))[ "nft_list" ] assert len(coins) == 1 diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index 858ee8c2c11e..06a4aaa589c2 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -6,7 +6,7 @@ import logging import random from operator import attrgetter -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast import aiosqlite import pytest @@ -231,7 +231,7 @@ async def 
wallet_rpc_environment(two_wallet_nodes_services, request, self_hostna yield WalletRpcTestEnvironment(wallet_bundle_1, wallet_bundle_2, node_bundle) -async def create_tx_outputs(wallet: Wallet, output_args: List[Tuple[int, Optional[List[str]]]]) -> List[Dict[str, Any]]: +async def create_tx_outputs(wallet: Wallet, output_args: list[tuple[int, Optional[list[str]]]]) -> list[dict[str, Any]]: outputs = [] for args in output_args: output = {"amount": uint64(args[0]), "puzzle_hash": await wallet.get_new_puzzlehash()} @@ -242,7 +242,7 @@ async def create_tx_outputs(wallet: Wallet, output_args: List[Tuple[int, Optiona return outputs -async def assert_wallet_types(client: WalletRpcClient, expected: Dict[WalletType, int]) -> None: +async def assert_wallet_types(client: WalletRpcClient, expected: dict[WalletType, int]) -> None: for wallet_type in WalletType: wallets = await client.get_wallets(wallet_type) wallet_count = len(wallets) @@ -254,7 +254,7 @@ async def assert_wallet_types(client: WalletRpcClient, expected: Dict[WalletType def assert_tx_amounts( tx: TransactionRecord, - outputs: List[Dict[str, Any]], + outputs: list[dict[str, Any]], *, amount_fee: uint64, change_expected: bool, @@ -541,7 +541,7 @@ async def test_get_timestamp_for_height(wallet_rpc_environment: WalletRpcTestEnv @pytest.mark.anyio async def test_create_signed_transaction( wallet_rpc_environment: WalletRpcTestEnvironment, - output_args: List[Tuple[int, Optional[List[str]]]], + output_args: list[tuple[int, Optional[list[str]]]], fee: int, select_coin: bool, is_cat: bool, @@ -609,7 +609,7 @@ async def test_create_signed_transaction( await time_out_assert(20, get_confirmed_balance, generated_funds - amount_total, wallet_1_rpc, wallet_id) # Assert every coin comes from the same parent - additions: List[Coin] = spend_bundle.additions() + additions: list[Coin] = spend_bundle.additions() assert len({c.parent_coin_info for c in additions}) == 2 if is_cat else 1 # Assert you can get the spend for each addition @@ -622,8 +622,8 @@ async def test_create_signed_transaction( assert spend is not None # Assert the memos are all correct - addition_dict: Dict[bytes32, Coin] = {addition.name(): addition for addition in additions} - memo_dictionary: Dict[bytes32, List[bytes]] = compute_memos(spend_bundle) + addition_dict: dict[bytes32, Coin] = {addition.name(): addition for addition in additions} + memo_dictionary: dict[bytes32, list[bytes]] = compute_memos(spend_bundle) for output in outputs: if "memos" in output: found: bool = False @@ -1222,7 +1222,7 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) assert spend_bundle is not None await farm_transaction(full_node_api, wallet_node, spend_bundle) await time_out_assert(5, get_confirmed_balance, 4, wallet_2_rpc, cat_wallet_id) - test_crs: List[CoinRecord] = await wallet_1_rpc.get_coin_records_by_names( + test_crs: list[CoinRecord] = await wallet_1_rpc.get_coin_records_by_names( [a.name() for a in spend_bundle.additions() if a.amount != 4] ) for cr in test_crs: @@ -1236,7 +1236,7 @@ async def test_offer_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment) all_offers = await wallet_1_rpc.get_all_offers() assert len(all_offers) == 0 - driver_dict: Dict[str, Any] = {cat_asset_id.hex(): {"type": "CAT", "tail": "0x" + cat_asset_id.hex()}} + driver_dict: dict[str, Any] = {cat_asset_id.hex(): {"type": "CAT", "tail": "0x" + cat_asset_id.hex()}} create_res = await wallet_1_rpc.create_offer_for_ids( {uint32(1): -5, cat_asset_id.hex(): 1}, @@ -1842,8 +1842,8 @@ async 
def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment funds = await generate_funds(full_node_api, env.wallet_1) addr = encode_puzzle_hash(await wallet_2.get_new_puzzlehash(), "txch") - coin_300: List[Coin] - tx_amounts: List[uint64] = [uint64(1000), uint64(300), uint64(1000), uint64(1000), uint64(10000)] + coin_300: list[Coin] + tx_amounts: list[uint64] = [uint64(1000), uint64(300), uint64(1000), uint64(1000), uint64(10000)] for tx_amount in tx_amounts: funds -= tx_amount # create coins for tests @@ -1859,7 +1859,7 @@ async def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment await time_out_assert(20, get_confirmed_balance, funds, client, 1) # test min coin amount - min_coins: List[Coin] = await client_2.select_coins( + min_coins: list[Coin] = await client_2.select_coins( amount=1000, wallet_id=1, coin_selection_config=DEFAULT_COIN_SELECTION_CONFIG.override(min_coin_amount=uint64(1001)), @@ -1868,7 +1868,7 @@ async def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment assert len(min_coins) == 1 and min_coins[0].amount == uint64(10000) # test max coin amount - max_coins: List[Coin] = await client_2.select_coins( + max_coins: list[Coin] = await client_2.select_coins( amount=2000, wallet_id=1, coin_selection_config=DEFAULT_COIN_SELECTION_CONFIG.override( @@ -1880,7 +1880,7 @@ async def test_select_coins_rpc(wallet_rpc_environment: WalletRpcTestEnvironment # test excluded coin amounts non_1000_amt: int = sum(a for a in tx_amounts if a != 1000) - excluded_amt_coins: List[Coin] = await client_2.select_coins( + excluded_amt_coins: list[Coin] = await client_2.select_coins( amount=non_1000_amt, wallet_id=1, coin_selection_config=DEFAULT_COIN_SELECTION_CONFIG.override(excluded_coin_amounts=[uint64(1000)]), @@ -1963,7 +1963,7 @@ async def run_test_case( test_case: str, test_request: GetCoinRecords, test_total_count: Optional[int], - test_records: List[WalletCoinRecord], + test_records: list[WalletCoinRecord], ): response = await client.get_coin_records(test_request) assert response["coin_records"] == [coin.to_json_dict_parsed_metadata() for coin in test_records], test_case @@ -2324,8 +2324,8 @@ async def test_notification_rpcs(wallet_rpc_environment: WalletRpcTestEnvironmen @pytest.mark.limit_consensus_modes(reason="irrelevant") async def test_verify_signature( wallet_rpc_environment: WalletRpcTestEnvironment, - rpc_request: Dict[str, Any], - rpc_response: Dict[str, Any], + rpc_request: dict[str, Any], + rpc_response: dict[str, Any], prefix_hex_strings: bool, ): rpc_server: Optional[RpcServer] = wallet_rpc_environment.wallet_1.service.rpc_server diff --git a/chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py b/chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py index 836e57816c82..487c8cce8997 100644 --- a/chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py +++ b/chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py @@ -1,7 +1,6 @@ from __future__ import annotations import asyncio -from typing import List import pytest from clvm.casts import int_to_bytes @@ -31,7 +30,7 @@ zero_ph = bytes32(32 * b"\0") -async def get_all_messages_in_queue(queue: asyncio.Queue[Message]) -> List[Message]: +async def get_all_messages_in_queue(queue: asyncio.Queue[Message]) -> list[Message]: all_messages = [] await asyncio.sleep(2) while not queue.empty(): @@ -360,7 +359,7 @@ async def test_subscribe_for_ph_reorg(simulator_and_wallet: OldSimulatorsAndWall all_messages = await get_all_messages_in_queue(incoming_queue) - 
coin_update_messages: List[CoinStateUpdate] = [] + coin_update_messages: list[CoinStateUpdate] = [] for message in all_messages: if message.type == ProtocolMessageTypes.coin_state_update.value: coin_state_update = CoinStateUpdate.from_bytes(message.data) @@ -438,7 +437,7 @@ async def test_subscribe_for_coin_id_reorg(simulator_and_wallet: OldSimulatorsAn all_messages = await get_all_messages_in_queue(incoming_queue) - coin_update_messages: List[CoinStateUpdate] = [] + coin_update_messages: list[CoinStateUpdate] = [] for message in all_messages: if message.type == ProtocolMessageTypes.coin_state_update.value: coin_state_update = CoinStateUpdate.from_bytes(message.data) @@ -621,7 +620,7 @@ async def test_subscribe_for_hint_long_sync( all_messages = await get_all_messages_in_queue(incoming_queue) all_messages_1 = await get_all_messages_in_queue(incoming_queue_1) - def check_messages_for_hint(messages: List[Message]) -> None: + def check_messages_for_hint(messages: list[Message]) -> None: notified_state = None for message in messages: diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index db8848b674f7..5f31a3868d9e 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -3,8 +3,9 @@ import asyncio import functools import logging +from collections.abc import Awaitable from dataclasses import dataclass -from typing import Awaitable, Callable, List, Optional +from typing import Callable, Optional from unittest.mock import MagicMock import pytest @@ -71,7 +72,7 @@ async def get_nft_count(wallet: NFTWallet) -> int: @pytest.mark.limit_consensus_modes(reason="save time") @pytest.mark.anyio async def test_request_block_headers( - simulator_and_wallet: OldSimulatorsAndWallets, default_400_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, default_400_blocks: list[FullBlock] ) -> None: # Tests the edge case of receiving funds right before the recent blocks in weight proof [full_node_api], [(wallet_node, _)], bt = simulator_and_wallet @@ -108,7 +109,7 @@ async def test_request_block_headers( # ) @pytest.mark.anyio async def test_request_block_headers_rejected( - simulator_and_wallet: OldSimulatorsAndWallets, default_400_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, default_400_blocks: list[FullBlock] ) -> None: # Tests the edge case of receiving funds right before the recent blocks in weight proof [full_node_api], _, _ = simulator_and_wallet @@ -155,7 +156,7 @@ async def test_request_block_headers_rejected( @pytest.mark.anyio async def test_basic_sync_wallet( two_wallet_nodes: OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, use_delta_sync: bool, ) -> None: @@ -206,7 +207,7 @@ async def test_basic_sync_wallet( @pytest.mark.anyio async def test_almost_recent( two_wallet_nodes: OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, blockchain_constants: ConsensusConstants, use_delta_sync: bool, @@ -256,7 +257,7 @@ async def test_almost_recent( @pytest.mark.anyio async def test_backtrack_sync_wallet( two_wallet_nodes: OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, use_delta_sync: bool, ) -> None: @@ -286,7 +287,7 @@ async def test_backtrack_sync_wallet( @pytest.mark.anyio async def test_short_batch_sync_wallet( two_wallet_nodes: 
OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, use_delta_sync: bool, ) -> None: @@ -315,8 +316,8 @@ async def test_short_batch_sync_wallet( @pytest.mark.anyio async def test_long_sync_wallet( two_wallet_nodes: OldSimulatorsAndWallets, - default_1000_blocks: List[FullBlock], - default_400_blocks: List[FullBlock], + default_1000_blocks: list[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, use_delta_sync: bool, ) -> None: @@ -377,7 +378,7 @@ async def test_long_sync_wallet( @pytest.mark.anyio async def test_wallet_reorg_sync( two_wallet_nodes: OldSimulatorsAndWallets, - default_400_blocks: List[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, use_delta_sync: bool, ) -> None: @@ -436,7 +437,7 @@ async def test_wallet_reorg_sync( @pytest.mark.limit_consensus_modes(reason="save time") @pytest.mark.anyio async def test_wallet_reorg_get_coinbase( - two_wallet_nodes: OldSimulatorsAndWallets, default_400_blocks: List[FullBlock], self_hostname: str + two_wallet_nodes: OldSimulatorsAndWallets, default_400_blocks: list[FullBlock], self_hostname: str ) -> None: [full_node_api], wallets, bt = two_wallet_nodes full_node = full_node_api.full_node @@ -571,7 +572,7 @@ async def test_request_additions_success(simulator_and_wallet: OldSimulatorsAndW await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20) - payees: List[Payment] = [] + payees: list[Payment] = [] for i in range(10): payee_ph = await wallet.get_new_puzzlehash() payees.append(Payment(payee_ph, uint64(i + 100))) @@ -643,7 +644,7 @@ async def test_request_additions_success(simulator_and_wallet: OldSimulatorsAndW @pytest.mark.anyio async def test_get_wp_fork_point( - default_10000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + default_10000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_10000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate(blocks, blockchain_constants) @@ -786,7 +787,7 @@ async def test_dusted_wallet( await full_node_api.wait_for_wallets_synced(wallet_nodes=[farm_wallet_node, dust_wallet_node], timeout=20) # Part 1: create a single dust coin - payees: List[Payment] = [] + payees: list[Payment] = [] payee_ph = await dust_wallet.get_new_puzzlehash() payees.append(Payment(payee_ph, uint64(dust_value))) @@ -1260,11 +1261,11 @@ class FlakinessInfo: def flaky_get_coin_state( flakiness_info: FlakinessInfo, - func: Callable[[List[bytes32], WSChiaConnection, Optional[uint32]], Awaitable[List[CoinState]]], - ) -> Callable[[List[bytes32], WSChiaConnection, Optional[uint32]], Awaitable[List[CoinState]]]: + func: Callable[[list[bytes32], WSChiaConnection, Optional[uint32]], Awaitable[list[CoinState]]], + ) -> Callable[[list[bytes32], WSChiaConnection, Optional[uint32]], Awaitable[list[CoinState]]]: async def new_func( - coin_names: List[bytes32], peer: WSChiaConnection, fork_height: Optional[uint32] = None - ) -> List[CoinState]: + coin_names: list[bytes32], peer: WSChiaConnection, fork_height: Optional[uint32] = None + ) -> list[CoinState]: if flakiness_info.coin_state_flaky: flakiness_info.coin_state_flaky = False raise PeerRequestException() @@ -1293,11 +1294,11 @@ async def new_func(request: wallet_protocol.RequestPuzzleSolution) -> Optional[M def flaky_fetch_children( flakiness_info: FlakinessInfo, - func: Callable[[bytes32, WSChiaConnection, Optional[uint32]], 
Awaitable[List[CoinState]]], - ) -> Callable[[bytes32, WSChiaConnection, Optional[uint32]], Awaitable[List[CoinState]]]: + func: Callable[[bytes32, WSChiaConnection, Optional[uint32]], Awaitable[list[CoinState]]], + ) -> Callable[[bytes32, WSChiaConnection, Optional[uint32]], Awaitable[list[CoinState]]]: async def new_func( coin_name: bytes32, peer: WSChiaConnection, fork_height: Optional[uint32] = None - ) -> List[CoinState]: + ) -> list[CoinState]: if flakiness_info.fetch_children_flaky: flakiness_info.fetch_children_flaky = False raise PeerRequestException() @@ -1403,7 +1404,7 @@ async def tx_in_mempool() -> bool: @pytest.mark.skip("the test fails with 'wallet_state_manager not assigned'. This test doesn't work, skip it for now") async def test_bad_peak_mismatch( two_wallet_nodes: OldSimulatorsAndWallets, - default_1000_blocks: List[FullBlock], + default_1000_blocks: list[FullBlock], self_hostname: str, blockchain_constants: ConsensusConstants, monkeypatch: pytest.MonkeyPatch, @@ -1462,8 +1463,8 @@ async def test_bad_peak_mismatch( @pytest.mark.anyio async def test_long_sync_untrusted_break( setup_two_nodes_and_wallet: OldSimulatorsAndWallets, - default_1000_blocks: List[FullBlock], - default_400_blocks: List[FullBlock], + default_1000_blocks: list[FullBlock], + default_400_blocks: list[FullBlock], self_hostname: str, caplog: pytest.LogCaptureFixture, monkeypatch: pytest.MonkeyPatch, diff --git a/chia/_tests/wallet/test_address_type.py b/chia/_tests/wallet/test_address_type.py index 1cf9529f7f97..1de1f7fbdc24 100644 --- a/chia/_tests/wallet/test_address_type.py +++ b/chia/_tests/wallet/test_address_type.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict +from typing import Any import pytest @@ -8,19 +8,19 @@ @pytest.mark.parametrize("prefix", [None]) -def test_xch_hrp_for_default_config(config_with_address_prefix: Dict[str, Any]) -> None: +def test_xch_hrp_for_default_config(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix assert AddressType.XCH.hrp(config) == "xch" @pytest.mark.parametrize("prefix", ["txch"]) -def test_txch_hrp_for_testnet(config_with_address_prefix: Dict[str, Any]) -> None: +def test_txch_hrp_for_testnet(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix assert AddressType.XCH.hrp(config) == "txch" @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_xch(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_xch(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "xch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd", allowed_types={AddressType.XCH}, config=config @@ -29,7 +29,7 @@ def test_is_valid_address_xch(config_with_address_prefix: Dict[str, Any]) -> Non @pytest.mark.parametrize("prefix", ["txch"]) -def test_is_valid_address_txch(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_txch(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix # TXCH address validation requires a config valid = is_valid_address( @@ -41,7 +41,7 @@ def test_is_valid_address_txch(config_with_address_prefix: Dict[str, Any]) -> No @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_xch_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_xch_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = 
is_valid_address( "xch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8xxxxx", allowed_types={AddressType.XCH}, config=config @@ -50,7 +50,7 @@ def test_is_valid_address_xch_bad_address(config_with_address_prefix: Dict[str, @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_nft(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config @@ -59,7 +59,7 @@ def test_is_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> Non @pytest.mark.parametrize("prefix", ["txch"]) -def test_is_valid_address_nft_with_testnet(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_nft_with_testnet(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config @@ -68,7 +68,7 @@ def test_is_valid_address_nft_with_testnet(config_with_address_prefix: Dict[str, @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_nft_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_nft_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtxxxxx", allowed_types={AddressType.NFT}, config=config @@ -77,7 +77,7 @@ def test_is_valid_address_nft_bad_address(config_with_address_prefix: Dict[str, @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_did(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "did:chia:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7", @@ -88,7 +88,7 @@ def test_is_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> Non @pytest.mark.parametrize("prefix", ["txch"]) -def test_is_valid_address_did_with_testnet(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_did_with_testnet(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "did:chia:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7", @@ -99,7 +99,7 @@ def test_is_valid_address_did_with_testnet(config_with_address_prefix: Dict[str, @pytest.mark.parametrize("prefix", [None]) -def test_is_valid_address_did_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_is_valid_address_did_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix valid = is_valid_address( "did:chia:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsrxxxxx", @@ -110,7 +110,7 @@ def test_is_valid_address_did_bad_address(config_with_address_prefix: Dict[str, @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_xch(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_xch(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix address = ensure_valid_address( "xch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs8taffd", allowed_types={AddressType.XCH}, config=config @@ -119,7 +119,7 @@ def 
test_ensure_valid_address_xch(config_with_address_prefix: Dict[str, Any]) -> @pytest.mark.parametrize("prefix", ["txch"]) -def test_ensure_valid_address_txch(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_txch(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix address = ensure_valid_address( "txch1mnr0ygu7lvmk3nfgzmncfk39fwu0dv933yrcv97nd6pmrt7fzmhs2v6lg7", @@ -130,7 +130,7 @@ def test_ensure_valid_address_txch(config_with_address_prefix: Dict[str, Any]) - @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_xch_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_xch_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix with pytest.raises(ValueError): ensure_valid_address( @@ -141,7 +141,7 @@ def test_ensure_valid_address_xch_bad_address(config_with_address_prefix: Dict[s @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_nft(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix address = ensure_valid_address( "nft1mx2nkvml2eekjtqwdmxvmf3js8g083hpszzhkhtwvhcss8efqzhqtza773", allowed_types={AddressType.NFT}, config=config @@ -150,7 +150,7 @@ def test_ensure_valid_address_nft(config_with_address_prefix: Dict[str, Any]) -> @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_nft_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_nft_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix with pytest.raises(ValueError): ensure_valid_address( @@ -161,7 +161,7 @@ def test_ensure_valid_address_nft_bad_address(config_with_address_prefix: Dict[s @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_did(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix address = ensure_valid_address( "did:chia:14jxdtqcyp3gk8ka0678eq8mmtnktgpmp2vuqq3vtsl2e5qr7fyrsr9gsr7", @@ -172,7 +172,7 @@ def test_ensure_valid_address_did(config_with_address_prefix: Dict[str, Any]) -> @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_did_bad_address(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_did_bad_address(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix with pytest.raises(ValueError): ensure_valid_address( @@ -183,7 +183,7 @@ def test_ensure_valid_address_did_bad_address(config_with_address_prefix: Dict[s @pytest.mark.parametrize("prefix", [None]) -def test_ensure_valid_address_bad_length(config_with_address_prefix: Dict[str, Any]) -> None: +def test_ensure_valid_address_bad_length(config_with_address_prefix: dict[str, Any]) -> None: config = config_with_address_prefix with pytest.raises(ValueError): ensure_valid_address("xch1qqqqqqqqqqqqqqqqwygzk5", allowed_types={AddressType.XCH}, config=config) diff --git a/chia/_tests/wallet/test_clvm_streamable.py b/chia/_tests/wallet/test_clvm_streamable.py index 60fe2d4d8578..23eb592ba786 100644 --- a/chia/_tests/wallet/test_clvm_streamable.py +++ b/chia/_tests/wallet/test_clvm_streamable.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import List, Optional, Tuple 
+from typing import Optional import pytest @@ -78,14 +78,14 @@ def test_nested_serialization() -> None: @dataclasses.dataclass(frozen=True) class Compound(Streamable): optional: Optional[BasicCLVMStreamable] - list: List[BasicCLVMStreamable] + list: list[BasicCLVMStreamable] @clvm_streamable @dataclasses.dataclass(frozen=True) class CompoundCLVM(Streamable): optional: Optional[BasicCLVMStreamable] - list: List[BasicCLVMStreamable] + list: list[BasicCLVMStreamable] def test_compound_type_serialization() -> None: @@ -153,7 +153,7 @@ def test_compound_type_serialization() -> None: @clvm_streamable @dataclasses.dataclass(frozen=True) class DoesntWork(Streamable): - tuples_are_not_supported: Tuple[str] + tuples_are_not_supported: tuple[str] @clvm_streamable diff --git a/chia/_tests/wallet/test_coin_selection.py b/chia/_tests/wallet/test_coin_selection.py index 064c63794770..5eaf95deec96 100644 --- a/chia/_tests/wallet/test_coin_selection.py +++ b/chia/_tests/wallet/test_coin_selection.py @@ -3,7 +3,6 @@ import logging import time from random import randrange -from typing import List, Set import pytest @@ -47,7 +46,7 @@ def test_knapsack_coin_selection(self, a_hash: bytes32) -> None: coins_to_append = 1000 amounts = list(range(1, coins_to_append)) amounts.sort(reverse=True) - coin_list: List[Coin] = [Coin(a_hash, a_hash, uint64(100000000 * a)) for a in amounts] + coin_list: list[Coin] = [Coin(a_hash, a_hash, uint64(100000000 * a)) for a in amounts] for i in range(tries): knapsack = knapsack_coin_algorithm( coin_list, uint128(30000000000000), DEFAULT_CONSTANTS.MAX_COIN_AMOUNT, 999999, seed=bytes([i]) @@ -58,7 +57,7 @@ def test_knapsack_coin_selection(self, a_hash: bytes32) -> None: def test_knapsack_coin_selection_2(self, a_hash: bytes32) -> None: coin_amounts = [6, 20, 40, 80, 150, 160, 203, 202, 201, 320] coin_amounts.sort(reverse=True) - coin_list: List[Coin] = [Coin(a_hash, a_hash, uint64(a)) for a in coin_amounts] + coin_list: list[Coin] = [Coin(a_hash, a_hash, uint64(a)) for a in coin_amounts] # coin_list = set([coin for a in coin_amounts]) for i in range(100): knapsack = knapsack_coin_algorithm( @@ -81,12 +80,12 @@ async def test_coin_selection_randomly(self, a_hash: bytes32) -> None: spendable_amount += c_amount spendable_amount = uint128(spendable_amount) - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1) for a in coin_amounts ] for target_amount in coin_amounts[:100]: # select the first 100 values - result: Set[Coin] = await select_coins( + result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -101,7 +100,7 @@ async def test_coin_selection_randomly(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: spendable_amount = uint128(5000000000000 + 10000) - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord( Coin(a_hash, a_hash, uint64(5000000000000)), uint32(1), uint32(1), False, True, WalletType(0), 1 ) @@ -121,7 +120,7 @@ async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: # make sure coins are not identical. 
for target_amount in [10000, 9999]: print("Target amount: ", target_amount) - result: Set[Coin] = await select_coins( + result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -147,7 +146,7 @@ async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: ) spendable_amount = uint128(spendable_amount + 2000 * 100) for target_amount in [50000, 25000, 15000, 10000, 9000, 3000]: # select the first 100 values - dusty_result: Set[Coin] = await select_coins( + dusty_result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -163,7 +162,7 @@ async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: # test when we have multiple coins under target, and a lot of dust coins. spendable_amount = uint128(25000 + 10000) - new_coin_list: List[WalletCoinRecord] = [] + new_coin_list: list[WalletCoinRecord] = [] for i in range(5): new_coin_list.append( WalletCoinRecord( @@ -190,7 +189,7 @@ async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: ) ) for target_amount in [20000, 15000, 10000, 5000]: # select the first 100 values - dusty_below_target: Set[Coin] = await select_coins( + dusty_below_target: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, new_coin_list, @@ -208,7 +207,7 @@ async def test_coin_selection_with_dust(self, a_hash: bytes32) -> None: async def test_dust_and_one_large_coin(self, a_hash: bytes32) -> None: # test when we have a lot of dust and 1 large coin spendable_amount = uint128(50000 + 10000) - new_coin_list: List[WalletCoinRecord] = [ + new_coin_list: list[WalletCoinRecord] = [ WalletCoinRecord( Coin(a_hash, std_hash(b"123"), uint64(50000)), uint32(1), uint32(1), False, True, WalletType(0), 1 ) @@ -227,7 +226,7 @@ async def test_dust_and_one_large_coin(self, a_hash: bytes32) -> None: ) ) for target_amount in [50000, 10001, 10000, 9999]: - dusty_below_target: Set[Coin] = await select_coins( + dusty_below_target: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, new_coin_list, @@ -242,7 +241,7 @@ async def test_dust_and_one_large_coin(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_coin_selection_failure(self, a_hash: bytes32) -> None: spendable_amount = uint128(10000) - coin_list: List[WalletCoinRecord] = [] + coin_list: list[WalletCoinRecord] = [] for i in range(10000): coin_list.append( WalletCoinRecord( @@ -282,7 +281,7 @@ async def test_coin_selection_failure(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_coin_selection(self, a_hash: bytes32) -> None: coin_amounts = [3, 6, 20, 40, 80, 150, 160, 203, 202, 201, 320] - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1) for a in coin_amounts ] @@ -290,7 +289,7 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: # check for exact match target_amount = uint128(40) - exact_match_result: Set[Coin] = await select_coins( + exact_match_result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -304,7 +303,7 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: # check for match of 2 target_amount = uint128(153) - match_2: Set[Coin] = await select_coins( + match_2: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -317,7 +316,7 @@ async def test_coin_selection(self, a_hash: 
bytes32) -> None: assert len(match_2) == 2 # check for match of at least 3. it is random after all. target_amount = uint128(541) - match_3: Set[Coin] = await select_coins( + match_3: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -331,7 +330,7 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: # check for match of all target_amount = spendable_amount - match_all: Set[Coin] = await select_coins( + match_all: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -345,13 +344,13 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: # test smallest greater than target greater_coin_amounts = [1, 2, 5, 20, 400, 700] - greater_coin_list: List[WalletCoinRecord] = [ + greater_coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1) for a in greater_coin_amounts ] greater_spendable_amount = uint128(sum(greater_coin_amounts)) target_amount = uint128(625) - smallest_result: Set[Coin] = await select_coins( + smallest_result: set[Coin] = await select_coins( greater_spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, greater_coin_list, @@ -364,12 +363,12 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: assert len(smallest_result) == 1 # test smallest greater than target with only 1 large coin. - single_greater_coin_list: List[WalletCoinRecord] = [ + single_greater_coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(70000)), uint32(1), uint32(1), False, True, WalletType(0), 1) ] single_greater_spendable_amount = uint128(70000) target_amount = uint128(50000) - single_greater_result: Set[Coin] = await select_coins( + single_greater_result: set[Coin] = await select_coins( single_greater_spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, single_greater_coin_list, @@ -383,13 +382,13 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: # test smallest greater than target with only multiple larger then target coins. 
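`select_smallest_coin_over_target`, exercised here, has a one-line specification: among the coins that can cover the target on their own, pick the least valuable. A sketch over bare integers (the uint128/WalletCoinRecord plumbing is elided, and the behaviour at exactly the target amount is an assumption of this sketch):

from typing import Optional

def smallest_over_target(amounts: list[int], target: int) -> Optional[int]:
    # Only coins that alone cover the target are candidates; take the smallest.
    candidates = [a for a in amounts if a >= target]
    return min(candidates) if candidates else None

# Mirrors the fixtures above: [1, 2, 5, 20, 400, 700] with target 625 selects 700.
assert smallest_over_target([1, 2, 5, 20, 400, 700], 625) == 700
assert smallest_over_target([70000], 50000) == 70000
assert smallest_over_target([1, 2], 625) is None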
multiple_greater_coin_amounts = [90000, 100000, 120000, 200000, 100000] - multiple_greater_coin_list: List[WalletCoinRecord] = [ + multiple_greater_coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1) for a in multiple_greater_coin_amounts ] multiple_greater_spendable_amount = uint128(sum(multiple_greater_coin_amounts)) target_amount = uint128(70000) - multiple_greater_result: Set[Coin] = await select_coins( + multiple_greater_result: set[Coin] = await select_coins( multiple_greater_spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, multiple_greater_coin_list, @@ -406,7 +405,7 @@ async def test_coin_selection(self, a_hash: bytes32) -> None: async def test_coin_selection_difficult(self, a_hash: bytes32) -> None: num_coins = 40 spendable_amount = uint128(num_coins * 1000) - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord( Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64(1000)), uint32(1), @@ -419,7 +418,7 @@ async def test_coin_selection_difficult(self, a_hash: bytes32) -> None: for i in range(num_coins) ] target_amount = spendable_amount - 1 - result: Set[Coin] = await select_coins( + result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, @@ -434,7 +433,7 @@ async def test_coin_selection_difficult(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_smallest_coin_over_amount(self, a_hash: bytes32) -> None: - coin_list: List[Coin] = [ + coin_list: list[Coin] = [ Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64((39 - i) * 1000)) for i in range(40) ] assert select_smallest_coin_over_target(uint128(100), coin_list) == coin_list[39 - 1] @@ -446,7 +445,7 @@ async def test_smallest_coin_over_amount(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_sum_largest_coins(self, a_hash: bytes32) -> None: - coin_list: List[Coin] = list( + coin_list: list[Coin] = list( reversed([Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64(i)) for i in range(41)]) ) assert sum_largest_coins(uint128(40), coin_list) == {coin_list[0]} @@ -456,7 +455,7 @@ async def test_sum_largest_coins(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_knapsack_perf(self, a_hash: bytes32) -> None: start = time.time() - coin_list: List[Coin] = [ + coin_list: list[Coin] = [ Coin(a_hash, std_hash(i.to_bytes(4, "big")), uint64((200000 - i) * 1000)) for i in range(200000) ] knapsack_coin_algorithm(coin_list, uint128(2000000), 9999999999999999, 500) @@ -467,7 +466,7 @@ async def test_knapsack_perf(self, a_hash: bytes32) -> None: @pytest.mark.anyio async def test_coin_selection_min_coin(self, a_hash: bytes32) -> None: spendable_amount = uint128(5000000 + 500 + 40050) - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(5000000)), uint32(1), uint32(1), False, True, WalletType(0), 1) ] for i in range(500): @@ -497,7 +496,7 @@ async def test_coin_selection_min_coin(self, a_hash: bytes32) -> None: # make sure coins are not identical. 
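The `override(...)` calls in the loop below swap individual fields on an otherwise shared, frozen selection config; with a frozen dataclass that is just `dataclasses.replace`. A minimal sketch assuming a hypothetical config shape (the real CoinSelectionConfig carries more fields and streamable machinery):

import dataclasses

@dataclasses.dataclass(frozen=True)
class SelectionConfigSketch:
    # Hypothetical stand-in, not the actual chia config class.
    min_coin_amount: int = 0
    excluded_coin_ids: tuple[bytes, ...] = ()

    def override(self, **changes: object) -> "SelectionConfigSketch":
        # Frozen dataclasses cannot be mutated; replace() builds an updated copy.
        return dataclasses.replace(self, **changes)

default = SelectionConfigSketch()
tweaked = default.override(min_coin_amount=100)
assert default.min_coin_amount == 0 and tweaked.min_coin_amount == 100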
for target_amount in [500, 1000, 50000, 500000]: for min_coin_amount in [10, 100, 200, 300, 1000]: - result: Set[Coin] = await select_coins( + result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG.override(min_coin_amount=uint64(min_coin_amount)), coin_list, @@ -529,7 +528,7 @@ async def test_coin_selection_with_excluded_coins(self) -> None: ] excluded_coins = [Coin(a_hash, a_hash, uint64(3)), Coin(c_hash, c_hash, uint64(9))] # test that excluded coins are not included in the result - selected_coins: Set[Coin] = await select_coins( + selected_coins: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG.override(excluded_coin_ids=[c.name() for c in excluded_coins]), spendable_wallet_coin_records, @@ -558,7 +557,7 @@ async def test_coin_selection_with_excluded_coins(self) -> None: @pytest.mark.anyio async def test_coin_selection_with_zero_amount(self, a_hash: bytes32) -> None: coin_amounts = [3, 6, 20, 40, 80, 150, 160, 203, 202, 201, 320] - coin_list: List[WalletCoinRecord] = [ + coin_list: list[WalletCoinRecord] = [ WalletCoinRecord(Coin(a_hash, a_hash, uint64(a)), uint32(1), uint32(1), False, True, WalletType(0), 1) for a in coin_amounts ] @@ -566,7 +565,7 @@ async def test_coin_selection_with_zero_amount(self, a_hash: bytes32) -> None: # validate that a zero amount is handled correctly target_amount = uint128(0) - zero_amount_result: Set[Coin] = await select_coins( + zero_amount_result: set[Coin] = await select_coins( spendable_amount, DEFAULT_COIN_SELECTION_CONFIG, coin_list, diff --git a/chia/_tests/wallet/test_conditions.py b/chia/_tests/wallet/test_conditions.py index 57f36ed49edd..1f17657c6781 100644 --- a/chia/_tests/wallet/test_conditions.py +++ b/chia/_tests/wallet/test_conditions.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, List, Optional, Tuple, Type, Union +from typing import Any, Optional, Union import pytest from clvm.casts import int_from_bytes @@ -66,8 +66,8 @@ class ConditionSerializations: opcode: bytes program_args: Program - json_keys: List[str] - json_args: List[Any] + json_keys: list[str] + json_args: list[Any] @property def program(self) -> Program: @@ -203,9 +203,9 @@ def test_unknown_condition() -> None: ) def test_announcement_inversions( drivers: Union[ - Tuple[Type[CreateCoinAnnouncement], Type[AssertCoinAnnouncement]], - Tuple[Type[CreatePuzzleAnnouncement], Type[AssertPuzzleAnnouncement]], - Tuple[Type[CreateAnnouncement], Type[AssertAnnouncement]], + tuple[type[CreateCoinAnnouncement], type[AssertCoinAnnouncement]], + tuple[type[CreatePuzzleAnnouncement], type[AssertPuzzleAnnouncement]], + tuple[type[CreateAnnouncement], type[AssertAnnouncement]], ] ) -> None: create_driver, assert_driver = drivers @@ -236,9 +236,9 @@ def test_announcement_inversions( @dataclass(frozen=True) class TimelockInfo: - drivers: List[Condition] + drivers: list[Condition] parsed_info: ConditionValidTimes - conditions_after: Optional[List[Condition]] = None + conditions_after: Optional[list[Condition]] = None @pytest.mark.parametrize( @@ -336,7 +336,7 @@ def test_timelock_parsing(timelock_info: TimelockInfo) -> None: ], ) def test_invalid_condition( - cond: Type[ + cond: type[ Union[ AggSigParent, AggSigPuzzle, diff --git a/chia/_tests/wallet/test_new_wallet_protocol.py b/chia/_tests/wallet/test_new_wallet_protocol.py index 9ba62057ecf8..909279ae20e2 100644 --- a/chia/_tests/wallet/test_new_wallet_protocol.py +++ 
b/chia/_tests/wallet/test_new_wallet_protocol.py @@ -1,9 +1,11 @@ from __future__ import annotations from asyncio import Queue +from collections import OrderedDict +from collections.abc import AsyncGenerator from dataclasses import dataclass from random import Random -from typing import TYPE_CHECKING, AsyncGenerator, Dict, List, Optional, OrderedDict, Set, Tuple +from typing import Optional import pytest from chia_rs import AugSchemeMPL, Coin, CoinSpend, CoinState, Program @@ -32,19 +34,15 @@ IDENTITY_PUZZLE = Program.to(1) IDENTITY_PUZZLE_HASH = IDENTITY_PUZZLE.get_tree_hash() -OneNode = Tuple[List[SimulatorFullNodeService], List[WalletService], BlockTools] -# python 3.8 workaround follows - can be removed when 3.8 support is removed -if TYPE_CHECKING: - Mpu = Tuple[FullNodeSimulator, Queue[Message], WSChiaConnection] -else: - Mpu = Tuple[FullNodeSimulator, Queue, WSChiaConnection] +OneNode = tuple[list[SimulatorFullNodeService], list[WalletService], BlockTools] +Mpu = tuple[FullNodeSimulator, Queue[Message], WSChiaConnection] ALL_FILTER = wallet_protocol.CoinStateFilters(True, True, True, uint64(0)) async def connect_to_simulator( one_node: OneNode, self_hostname: str, mempool_updates: bool = True -) -> Tuple[FullNodeSimulator, Queue[Message], WSChiaConnection]: +) -> tuple[FullNodeSimulator, Queue[Message], WSChiaConnection]: [full_node_service], _, _ = one_node full_node_api = full_node_service._api @@ -417,8 +415,8 @@ async def test_request_puzzle_state(one_node: OneNode, self_hostname: str) -> No assert peak_header_hash is not None # Add coin records - coin_records: List[CoinRecord] = [] - puzzle_hashes: List[bytes32] = [] + coin_records: list[CoinRecord] = [] + puzzle_hashes: list[bytes32] = [] for ph_i in range(10): puzzle_hash = bytes32(ph_i.to_bytes(1, "big") * 32) @@ -621,14 +619,14 @@ async def test_request_puzzle_state_limit(one_node: OneNode, self_hostname: str) @dataclass(frozen=True) class PuzzleStateData: - coin_states: List[CoinState] + coin_states: list[CoinState] end_of_batch: bool previous_height: Optional[uint32] header_hash: bytes32 async def sync_puzzle_hashes( - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], *, initial_previous_height: Optional[uint32], initial_header_hash: bytes32, @@ -693,9 +691,9 @@ async def test_sync_puzzle_state( simulator.full_node.config["max_subscribe_response_items"] = 7400 # Generate coin records - puzzle_hashes: List[bytes32] = [] - hints: List[Tuple[bytes32, bytes]] = [] - coin_records: Dict[bytes32, CoinRecord] = dict() + puzzle_hashes: list[bytes32] = [] + hints: list[tuple[bytes32, bytes]] = [] + coin_records: dict[bytes32, CoinRecord] = dict() rng = Random(0) @@ -736,7 +734,7 @@ async def test_sync_puzzle_state( async def run_test(include_spent: bool, include_unspent: bool, include_hinted: bool, min_amount: uint64) -> None: # Calculate expected coin records based on filters - expected_coin_records: Dict[bytes32, CoinRecord] = dict() + expected_coin_records: dict[bytes32, CoinRecord] = dict() for coin_id, coin_record in coin_records.items(): if not include_spent and coin_record.spent_block_index > 0: @@ -751,7 +749,7 @@ async def run_test(include_spent: bool, include_unspent: bool, include_hinted: b expected_coin_records[coin_id] = coin_record # Sync all coin states - coin_ids: Set[bytes32] = set() + coin_ids: set[bytes32] = set() last_height = -1 async for batch in sync_puzzle_hashes( @@ -784,7 +782,7 @@ async def run_test(include_spent: bool, include_unspent: bool, include_hinted: b await run_test(include_spent, 
include_unspent, include_hinted, uint64(min_amount)) -async def assert_mempool_added(queue: Queue[Message], transaction_ids: Set[bytes32]) -> None: +async def assert_mempool_added(queue: Queue[Message], transaction_ids: set[bytes32]) -> None: message = await queue.get() assert message.type == ProtocolMessageTypes.mempool_items_added.value @@ -794,7 +792,7 @@ async def assert_mempool_added(queue: Queue[Message], transaction_ids: Set[bytes async def assert_mempool_removed( queue: Queue[Message], - removed_items: Set[wallet_protocol.RemovedMempoolItem], + removed_items: set[wallet_protocol.RemovedMempoolItem], ) -> None: message = await queue.get() assert message.type == ProtocolMessageTypes.mempool_items_removed.value @@ -818,7 +816,7 @@ async def raw_mpu_setup(one_node: OneNode, self_hostname: str, no_capability: bo await simulator.farm_blocks_to_puzzlehash(1) await queue.get() - new_coins: List[Tuple[Coin, bytes32]] = [] + new_coins: list[tuple[Coin, bytes32]] = [] for i in range(10): puzzle = Program.to(2) @@ -847,7 +845,7 @@ async def raw_mpu_setup(one_node: OneNode, self_hostname: str, no_capability: bo return simulator, queue, peer -async def make_coin(full_node: FullNode) -> Tuple[Coin, bytes32]: +async def make_coin(full_node: FullNode) -> tuple[Coin, bytes32]: ph = IDENTITY_PUZZLE_HASH coin = Coin(bytes32(b"\0" * 32), ph, uint64(1000)) hint = bytes32(b"\0" * 32) diff --git a/chia/_tests/wallet/test_nft_store.py b/chia/_tests/wallet/test_nft_store.py index be2fa7bcfc1e..124f6297f0c3 100644 --- a/chia/_tests/wallet/test_nft_store.py +++ b/chia/_tests/wallet/test_nft_store.py @@ -2,7 +2,6 @@ import random from dataclasses import dataclass, field -from typing import Dict, List import pytest @@ -29,7 +28,7 @@ def get_dummy_nft(random_: random.Random) -> NFTCoinInfo: @dataclass class DummyNFTs: seeded_random: random.Random - nfts_per_wallet: Dict[uint32, List[NFTCoinInfo]] = field(default_factory=dict) + nfts_per_wallet: dict[uint32, list[NFTCoinInfo]] = field(default_factory=dict) def generate(self, wallet_id: int, count: int) -> None: nfts = self.nfts_per_wallet.setdefault(uint32(wallet_id), []) diff --git a/chia/_tests/wallet/test_offer_parsing_performance.py b/chia/_tests/wallet/test_offer_parsing_performance.py index 1322f790fab2..a3f195c8a978 100644 --- a/chia/_tests/wallet/test_offer_parsing_performance.py +++ b/chia/_tests/wallet/test_offer_parsing_performance.py @@ -1,8 +1,8 @@ from __future__ import annotations import cProfile +from collections.abc import Iterator from contextlib import contextmanager -from typing import Iterator from chia._tests.util.misc import BenchmarkRunner from chia.wallet.trading.offer import Offer diff --git a/chia/_tests/wallet/test_puzzle_store.py b/chia/_tests/wallet/test_puzzle_store.py index 5e862289ade2..3e02be62ab40 100644 --- a/chia/_tests/wallet/test_puzzle_store.py +++ b/chia/_tests/wallet/test_puzzle_store.py @@ -2,7 +2,6 @@ import random from dataclasses import dataclass, field -from typing import Dict, List import pytest from chia_rs import AugSchemeMPL @@ -29,8 +28,8 @@ def get_dummy_record(index: int, wallet_id: int, seeded_random: random.Random) - @dataclass class DummyDerivationRecords: seeded_random: random.Random - index_per_wallet: Dict[int, int] = field(default_factory=dict) - records_per_wallet: Dict[int, List[DerivationRecord]] = field(default_factory=dict) + index_per_wallet: dict[int, int] = field(default_factory=dict) + records_per_wallet: dict[int, list[DerivationRecord]] = field(default_factory=dict) def generate(self, 
wallet_id: int, count: int) -> None: records = self.records_per_wallet.setdefault(wallet_id, []) diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index 5896d02748bc..8fd753bac9b0 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import List, Optional +from typing import Optional import click import pytest @@ -180,7 +180,7 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram assert utx.signing_instructions.targets[0].fingerprint == synthetic_pubkey.get_fingerprint().to_bytes(4, "big") assert utx.signing_instructions.targets[0].message == message - signing_responses: List[SigningResponse] = ( + signing_responses: list[SigningResponse] = ( await wallet_rpc.execute_signing_instructions(ExecuteSigningInstructions(utx.signing_instructions)) ).signing_responses assert len(signing_responses) == 1 @@ -256,7 +256,7 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram == signing_responses[0].signature ) - signed_txs: List[SignedTransaction] = ( + signed_txs: list[SignedTransaction] = ( await wallet_rpc.apply_signatures( ApplySignatures(spends=[Spend.from_coin_spend(coin_spend)], signing_responses=signing_responses) ) @@ -322,7 +322,7 @@ async def test_p2blsdohp_execute_signing_instructions(wallet_environments: Walle # Test just a path hint test_name: bytes32 = std_hash(b"path hint only") child_sk: PrivateKey = _derive_path_unhardened(root_sk, [uint64(1), uint64(2), uint64(3), uint64(4)]) - signing_responses: List[SigningResponse] = await wallet.execute_signing_instructions( + signing_responses: list[SigningResponse] = await wallet.execute_signing_instructions( SigningInstructions( KeyHints( [], @@ -505,15 +505,15 @@ async def test_p2blsdohp_execute_signing_instructions(wallet_environments: Walle def test_blind_signer_translation_layer() -> None: - sum_hints: List[SumHint] = [ + sum_hints: list[SumHint] = [ SumHint([b"a", b"b", b"c"], b"offset", b"final"), SumHint([b"c", b"b", b"a"], b"offset2", b"final"), ] - path_hints: List[PathHint] = [ + path_hints: list[PathHint] = [ PathHint(b"root1", [uint64(1), uint64(2), uint64(3)]), PathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), ] - signing_targets: List[SigningTarget] = [ + signing_targets: list[SigningTarget] = [ SigningTarget(b"pubkey", b"message", bytes32([0] * 32)), SigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), ] @@ -531,15 +531,15 @@ def test_blind_signer_translation_layer() -> None: bytes32([1] * 32), ) - bstl_sum_hints: List[BSTLSumHint] = [ + bstl_sum_hints: list[BSTLSumHint] = [ BSTLSumHint([b"a", b"b", b"c"], b"offset", b"final"), BSTLSumHint([b"c", b"b", b"a"], b"offset2", b"final"), ] - bstl_path_hints: List[BSTLPathHint] = [ + bstl_path_hints: list[BSTLPathHint] = [ BSTLPathHint(b"root1", [uint64(1), uint64(2), uint64(3)]), BSTLPathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), ] - bstl_signing_targets: List[BSTLSigningTarget] = [ + bstl_signing_targets: list[BSTLSigningTarget] = [ BSTLSigningTarget(b"pubkey", b"message", bytes32([0] * 32)), BSTLSigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), ] diff --git a/chia/_tests/wallet/test_singleton_lifecycle_fast.py b/chia/_tests/wallet/test_singleton_lifecycle_fast.py index c57d3912a43c..7eb710b22b9f 100644 --- a/chia/_tests/wallet/test_singleton_lifecycle_fast.py +++ 
b/chia/_tests/wallet/test_singleton_lifecycle_fast.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, cast, get_args, get_origin +from typing import Any, Callable, Optional, TypeVar, cast, get_args, get_origin from chia_rs import G1Element, G2Element from clvm_tools import binutils @@ -40,7 +40,7 @@ T = TypeVar("T") -def satisfies_hint(obj: T, type_hint: Type[T]) -> bool: +def satisfies_hint(obj: T, type_hint: type[T]) -> bool: """ Check if an object satisfies a type hint. This is a simplified version of `isinstance` that also handles generic types. @@ -75,7 +75,7 @@ def satisfies_hint(obj: T, type_hint: Type[T]) -> bool: class PuzzleDB: def __init__(self) -> None: - self._db: Dict[bytes32, Program] = {} + self._db: dict[bytes32, Program] = {} def add_puzzle(self, puzzle: Program) -> None: self._db[puzzle.get_tree_hash()] = Program.from_bytes(bytes(puzzle)) @@ -84,7 +84,7 @@ def puzzle_for_hash(self, puzzle_hash: bytes32) -> Optional[Program]: return self._db.get(puzzle_hash) -def from_kwargs(kwargs: Dict[str, Any], key: str, type_info: Type[T]) -> T: +def from_kwargs(kwargs: dict[str, Any], key: str, type_info: type[T]) -> T: """Raise an exception if `kwargs[key]` is missing or the wrong type""" if key not in kwargs: raise ValueError(f"`{key}` missing in call to `solve`") @@ -93,7 +93,7 @@ def from_kwargs(kwargs: Dict[str, Any], key: str, type_info: Type[T]) -> T: return cast(T, kwargs[key]) -Solver_F = Callable[["Solver", PuzzleDB, List[Program], Any], Program] +Solver_F = Callable[["Solver", PuzzleDB, list[Program], Any], Program] class Solver: @@ -102,14 +102,14 @@ class Solver: """ def __init__(self) -> None: - self.solvers_by_puzzle_hash: Dict[bytes32, Solver_F] = {} + self.solvers_by_puzzle_hash: dict[bytes32, Solver_F] = {} def register_solver(self, puzzle_hash: bytes32, solver_f: Solver_F) -> None: if puzzle_hash in self.solvers_by_puzzle_hash: raise ValueError(f"solver registered for {puzzle_hash}") self.solvers_by_puzzle_hash[puzzle_hash] = solver_f - def solve(self, puzzle_db: PuzzleDB, puzzle: Program, **kwargs: Dict[str, Any]) -> Program: + def solve(self, puzzle_db: PuzzleDB, puzzle: Program, **kwargs: dict[str, Any]) -> Program: """ The legal values and types for `kwargs` depends on the underlying solver that's invoked. The `kwargs` are passed through to any inner solvers @@ -128,34 +128,34 @@ def solve(self, puzzle_db: PuzzleDB, puzzle: Program, **kwargs: Dict[str, Any]) raise ValueError("can't solve") -def solve_launcher(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any]) -> Program: +def solve_launcher(solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any]) -> Program: launcher_amount = from_kwargs(kwargs, "launcher_amount", uint64) destination_puzzle_hash = from_kwargs(kwargs, "destination_puzzle_hash", bytes32) - metadata = from_kwargs(kwargs, "metadata", List[Tuple[str, str]]) + metadata = from_kwargs(kwargs, "metadata", list[tuple[str, str]]) solution = Program.to([destination_puzzle_hash, launcher_amount, metadata]) return solution -def solve_anyone_can_spend(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any]) -> Program: +def solve_anyone_can_spend(solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any]) -> Program: """ This is the anyone-can-spend puzzle `1`. Note that farmers can easily steal this coin, so don't use it except for testing. 
""" - conditions = from_kwargs(kwargs, "conditions", List[Program]) + conditions = from_kwargs(kwargs, "conditions", list[Program]) solution = Program.to(conditions) return solution def solve_anyone_can_spend_with_padding( - solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any] + solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any] ) -> Program: """This is the puzzle `(a (q . 1) 3)`. It's only for testing.""" - conditions = from_kwargs(kwargs, "conditions", List[Program]) + conditions = from_kwargs(kwargs, "conditions", list[Program]) solution = Program.to((0, conditions)) return solution -def solve_singleton(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any]) -> Program: +def solve_singleton(solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any]) -> Program: """ `lineage_proof`: a `Program` that proves the parent is also a singleton (or the launcher). `coin_amount`: a necessarily-odd value of mojos in this coin. @@ -168,7 +168,7 @@ def solve_singleton(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kw return solution -def solve_pool_member(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any]) -> Program: +def solve_pool_member(solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any]) -> Program: pool_member_spend_type = from_kwargs(kwargs, "pool_member_spend_type", str) allowable = ["to-waiting-room", "claim-p2-nft"] if pool_member_spend_type not in allowable: @@ -185,7 +185,7 @@ def solve_pool_member(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], def solve_pool_waiting_room( - solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any] + solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any] ) -> Program: pool_leaving_spend_type = from_kwargs(kwargs, "pool_leaving_spend_type", str) allowable = ["exit-waiting-room", "claim-p2-nft"] @@ -193,7 +193,7 @@ def solve_pool_waiting_room( raise ValueError(f"`pool_leaving_spend_type` must be one of {'/'.join(allowable)} for POOL_MEMBER puzzle") exit_waiting_room = pool_leaving_spend_type == "exit-waiting-room" if exit_waiting_room: - key_value_list = from_kwargs(kwargs, "key_value_list", List[Tuple[str, str]]) + key_value_list = from_kwargs(kwargs, "key_value_list", list[tuple[str, str]]) destination_puzzle_hash = from_kwargs(kwargs, "destination_puzzle_hash", bytes32) return Program.to([0, 1, key_value_list, destination_puzzle_hash]) # it's an "absorb_pool_reward" type @@ -203,7 +203,7 @@ def solve_pool_waiting_room( return solution -def solve_p2_singleton(solver: Solver, puzzle_db: PuzzleDB, args: List[Program], kwargs: Dict[str, Any]) -> Program: +def solve_p2_singleton(solver: Solver, puzzle_db: PuzzleDB, args: list[Program], kwargs: dict[str, Any]) -> Program: p2_singleton_spend_type = from_kwargs(kwargs, "p2_singleton_spend_type", str) allowable = ["claim-p2-nft", "delayed-spend"] if p2_singleton_spend_type not in allowable: @@ -261,7 +261,7 @@ def coin_spend_for_conditions(self, puzzle_db: PuzzleDB, **kwargs: object) -> Co ) return make_spend(coin, puzzle_reveal, solution) - def update_state(self, puzzle_db: PuzzleDB, removals: List[CoinSpend]) -> int: + def update_state(self, puzzle_db: PuzzleDB, removals: list[CoinSpend]) -> int: state_change_count = 0 current_coin_name = self.current_state.name() for coin_spend in removals: @@ -303,9 +303,9 @@ def launcher_conditions_and_spend_bundle( parent_coin_id: bytes32, 
launcher_amount: uint64, initial_singleton_inner_puzzle: Program, - metadata: List[Tuple[str, str]], + metadata: list[tuple[str, str]], launcher_puzzle: Program, -) -> Tuple[bytes32, List[Program], WalletSpendBundle]: +) -> tuple[bytes32, list[Program], WalletSpendBundle]: puzzle_db.add_puzzle(launcher_puzzle) launcher_puzzle_hash = launcher_puzzle.get_tree_hash() launcher_coin = Coin(parent_coin_id, launcher_puzzle_hash, launcher_amount) @@ -374,7 +374,7 @@ def p2_singleton_puzzle_hash_for_launcher( def claim_p2_singleton( puzzle_db: PuzzleDB, singleton_wallet: SingletonWallet, p2_singleton_coin: Coin -) -> Tuple[CoinSpend, List[Program]]: +) -> tuple[CoinSpend, list[Program]]: inner_puzzle = singleton_wallet.inner_puzzle(puzzle_db) assert inner_puzzle inner_puzzle_hash = inner_puzzle.get_tree_hash() @@ -432,7 +432,7 @@ def assert_coin_spent(coin_store: CoinStore, coin: Coin, is_spent: bool = True) def spend_coin_to_singleton( puzzle_db: PuzzleDB, launcher_puzzle: Program, coin_store: CoinStore, now: CoinTimestamp -) -> Tuple[List[Coin], List[CoinSpend]]: +) -> tuple[list[Coin], list[CoinSpend]]: farmed_coin_amount = 100000 metadata = [("foo", "bar")] @@ -468,7 +468,7 @@ def spend_coin_to_singleton( return additions, removals -def find_interesting_singletons(removals: List[CoinSpend]) -> List[SingletonWallet]: +def find_interesting_singletons(removals: list[CoinSpend]) -> list[SingletonWallet]: singletons = [] for coin_spend in removals: if coin_spend.coin.puzzle_hash == LAUNCHER_PUZZLE_HASH: @@ -484,7 +484,7 @@ def find_interesting_singletons(removals: List[CoinSpend]) -> List[SingletonWall return singletons -def filter_p2_singleton(puzzle_db: PuzzleDB, additions: List[Coin]) -> List[Coin]: +def filter_p2_singleton(puzzle_db: PuzzleDB, additions: list[Coin]) -> list[Coin]: r = [] for coin in additions: puzzle = puzzle_db.puzzle_for_hash(coin.puzzle_hash) diff --git a/chia/_tests/wallet/test_transaction_store.py b/chia/_tests/wallet/test_transaction_store.py index f62d96c597a1..6b20b2a7adc0 100644 --- a/chia/_tests/wallet/test_transaction_store.py +++ b/chia/_tests/wallet/test_transaction_store.py @@ -2,7 +2,7 @@ import dataclasses import random -from typing import Any, List, Optional, Tuple +from typing import Any, Optional import pytest @@ -210,7 +210,7 @@ async def test_get_farming_rewards(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: store = await WalletTransactionStore.create(db_wrapper) - test_trs: List[TransactionRecord] = [] + test_trs: list[TransactionRecord] = [] # tr1 is type OUTGOING_TX for conf in [True, False]: @@ -320,7 +320,7 @@ async def test_all_transactions_for_wallet(seeded_random: random.Random) -> None async with DBConnection(1) as db_wrapper: store = await WalletTransactionStore.create(db_wrapper) - test_trs: List[TransactionRecord] = [] + test_trs: list[TransactionRecord] = [] for wallet_id in [1, 2]: for type in [ TransactionType.INCOMING_TX, @@ -356,7 +356,7 @@ async def test_all_transactions_for_wallet(seeded_random: random.Random) -> None assert await store.get_all_transactions_for_wallet(2, TransactionType.OUTGOING_TRADE) == [test_trs[11]] -def cmp(lhs: List[Any], rhs: List[Any]) -> bool: +def cmp(lhs: list[Any], rhs: list[Any]) -> bool: if len(rhs) != len(lhs): return False @@ -371,7 +371,7 @@ async def test_get_all_transactions(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: store = await WalletTransactionStore.create(db_wrapper) - test_trs: List[TransactionRecord] = [] + test_trs: 
list[TransactionRecord] = [] assert await store.get_all_transactions() == [] for wallet_id in [1, 2, 3, 4]: test_trs.append(dataclasses.replace(tr1, name=bytes32.random(seeded_random), wallet_id=uint32(wallet_id))) @@ -388,7 +388,7 @@ async def test_get_transaction_above(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: store = await WalletTransactionStore.create(db_wrapper) - test_trs: List[TransactionRecord] = [] + test_trs: list[TransactionRecord] = [] assert await store.get_transaction_above(uint32(0)) == [] for height in range(10): test_trs.append( @@ -440,7 +440,7 @@ async def test_rollback_to_block(seeded_random: random.Random) -> None: async with DBConnection(1) as db_wrapper: store = await WalletTransactionStore.create(db_wrapper) - test_trs: List[TransactionRecord] = [] + test_trs: list[TransactionRecord] = [] for height in range(10): test_trs.append( dataclasses.replace(tr1, name=bytes32.random(seeded_random), confirmed_at_height=uint32(height)) @@ -798,7 +798,7 @@ async def test_get_not_sent(seeded_random: random.Random) -> None: @pytest.mark.anyio async def test_transaction_record_is_valid() -> None: - invalid_attempts: List[Tuple[str, uint8, Optional[str]]] = [] + invalid_attempts: list[tuple[str, uint8, Optional[str]]] = [] # The tx should be valid as long as we don't have minimum_send_attempts failed attempts while len(invalid_attempts) < minimum_send_attempts: assert dataclasses.replace(tr1, sent_to=invalid_attempts).is_valid() diff --git a/chia/_tests/wallet/test_util.py b/chia/_tests/wallet/test_util.py index 1b4a1c559a50..5ea8aa4bbe8e 100644 --- a/chia/_tests/wallet/test_util.py +++ b/chia/_tests/wallet/test_util.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Tuple +from typing import Any import pytest @@ -125,7 +125,7 @@ def test_list_to_binary_tree() -> None: ), ], ) -def test_lineage_proof_varargs(serializations: Tuple[Tuple[Any, ...], Program, List[LineageProofField]]) -> None: +def test_lineage_proof_varargs(serializations: tuple[tuple[Any, ...], Program, list[LineageProofField]]) -> None: var_args, expected_program, lp_fields = serializations assert LineageProof(*var_args).to_program() == expected_program assert LineageProof(*var_args) == LineageProof.from_program(expected_program, lp_fields) @@ -164,7 +164,7 @@ def test_lineage_proof_varargs(serializations: Tuple[Tuple[Any, ...], Program, L ), ], ) -def test_lineage_proof_kwargs(serializations: Tuple[Dict[str, Any], Program, List[LineageProofField]]) -> None: +def test_lineage_proof_kwargs(serializations: tuple[dict[str, Any], Program, list[LineageProofField]]) -> None: kwargs, expected_program, lp_fields = serializations assert LineageProof(**kwargs).to_program() == expected_program assert LineageProof(**kwargs) == LineageProof.from_program(expected_program, lp_fields) diff --git a/chia/_tests/wallet/test_wallet.py b/chia/_tests/wallet/test_wallet.py index 1b8705ca80d1..21302c456f32 100644 --- a/chia/_tests/wallet/test_wallet.py +++ b/chia/_tests/wallet/test_wallet.py @@ -2,7 +2,7 @@ import dataclasses from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional import pytest from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -1112,7 +1112,7 @@ async def test_clawback_resync(self, self_hostname: str, wallet_environments: Wa await time_out_assert(20, wsm_1.coin_store.count_small_unspent, 0, 1000, CoinType.CLAWBACK) await time_out_assert(20, wsm_2.coin_store.count_small_unspent, 0, 
1000, CoinType.CLAWBACK) - before_txs: Dict[str, Dict[TransactionType, int]] = {"sender": {}, "recipient": {}} + before_txs: dict[str, dict[TransactionType, int]] = {"sender": {}, "recipient": {}} before_txs["sender"][TransactionType.INCOMING_CLAWBACK_SEND] = ( await wsm_1.tx_store.get_transaction_count_for_wallet( 1, type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]) @@ -1159,7 +1159,7 @@ async def test_clawback_resync(self, self_hostname: str, wallet_environments: Wa wsm_1 = env_1.node.wallet_state_manager wsm_2 = env_2.node.wallet_state_manager - after_txs: Dict[str, Dict[TransactionType, int]] = {"sender": {}, "recipient": {}} + after_txs: dict[str, dict[TransactionType, int]] = {"sender": {}, "recipient": {}} after_txs["sender"][TransactionType.INCOMING_CLAWBACK_SEND] = ( await wsm_1.tx_store.get_transaction_count_for_wallet( 1, type_filter=TransactionTypeFilter.include([TransactionType.INCOMING_CLAWBACK_SEND]) @@ -1260,7 +1260,7 @@ async def test_wallet_coinbase_reorg(self, wallet_environments: WalletTestFramew @pytest.mark.anyio async def test_wallet_send_to_three_peers( self, - three_sim_two_wallets: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + three_sim_two_wallets: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], trusted: bool, self_hostname: str, ) -> None: @@ -2083,7 +2083,7 @@ async def test_wallet_transaction_options(self, wallet_environments: WalletTestF def test_get_wallet_db_path_v2_r1() -> None: root_path: Path = Path("/x/y/z/.chia/mainnet").resolve() - config: Dict[str, Any] = { + config: dict[str, Any] = { "database_path": "wallet/db/blockchain_wallet_v2_r1_CHALLENGE_KEY.sqlite", "selected_network": "mainnet", } @@ -2095,7 +2095,7 @@ def test_get_wallet_db_path_v2_r1() -> None: def test_get_wallet_db_path_v2() -> None: root_path: Path = Path("/x/y/z/.chia/mainnet").resolve() - config: Dict[str, Any] = { + config: dict[str, Any] = { "database_path": "wallet/db/blockchain_wallet_v2_CHALLENGE_KEY.sqlite", "selected_network": "mainnet", } @@ -2107,7 +2107,7 @@ def test_get_wallet_db_path_v2() -> None: def test_get_wallet_db_path_v1() -> None: root_path: Path = Path("/x/y/z/.chia/mainnet").resolve() - config: Dict[str, Any] = { + config: dict[str, Any] = { "database_path": "wallet/db/blockchain_wallet_v1_CHALLENGE_KEY.sqlite", "selected_network": "mainnet", } @@ -2119,7 +2119,7 @@ def test_get_wallet_db_path_v1() -> None: def test_get_wallet_db_path_testnet() -> None: root_path: Path = Path("/x/y/z/.chia/testnet").resolve() - config: Dict[str, Any] = { + config: dict[str, Any] = { "database_path": "wallet/db/blockchain_wallet_v2_CHALLENGE_KEY.sqlite", "selected_network": "testnet", } @@ -2131,7 +2131,7 @@ def test_get_wallet_db_path_testnet() -> None: @pytest.mark.anyio async def test_wallet_has_no_server( - simulator_and_wallet: Tuple[List[FullNodeSimulator], List[Tuple[WalletNode, ChiaServer]], BlockTools], + simulator_and_wallet: tuple[list[FullNodeSimulator], list[tuple[WalletNode, ChiaServer]], BlockTools], ) -> None: full_nodes, wallets, bt = simulator_and_wallet wallet_node, wallet_server = wallets[0] diff --git a/chia/_tests/wallet/test_wallet_action_scope.py b/chia/_tests/wallet/test_wallet_action_scope.py index 5b01e3da017b..2ac0e8d56fba 100644 --- a/chia/_tests/wallet/test_wallet_action_scope.py +++ b/chia/_tests/wallet/test_wallet_action_scope.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, 
Optional, Tuple +from typing import Optional import pytest from chia_rs import G2Element @@ -36,18 +36,18 @@ def test_back_and_forth_serialization() -> None: @dataclass class MockWalletStateManager: most_recent_call: Optional[ - Tuple[List[TransactionRecord], bool, bool, bool, List[SigningResponse], List[WalletSpendBundle]] + tuple[list[TransactionRecord], bool, bool, bool, list[SigningResponse], list[WalletSpendBundle]] ] = None async def add_pending_transactions( self, - txs: List[TransactionRecord], + txs: list[TransactionRecord], push: bool, merge_spends: bool, sign: bool, - additional_signing_responses: List[SigningResponse], - extra_spends: List[WalletSpendBundle], - ) -> List[TransactionRecord]: + additional_signing_responses: list[SigningResponse], + extra_spends: list[WalletSpendBundle], + ) -> list[TransactionRecord]: self.most_recent_call = (txs, push, merge_spends, sign, additional_signing_responses, extra_spends) return txs diff --git a/chia/_tests/wallet/test_wallet_blockchain.py b/chia/_tests/wallet/test_wallet_blockchain.py index 86392193216e..a2d92be0b08b 100644 --- a/chia/_tests/wallet/test_wallet_blockchain.py +++ b/chia/_tests/wallet/test_wallet_blockchain.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - import pytest from chia._tests.util.db_connection import DBConnection @@ -21,7 +19,7 @@ @pytest.mark.anyio @pytest.mark.standard_block_tools async def test_wallet_blockchain( - simulator_and_wallet: OldSimulatorsAndWallets, default_1000_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, default_1000_blocks: list[FullBlock] ) -> None: [full_node_api], [(wallet_node, _)], bt = simulator_and_wallet @@ -75,7 +73,7 @@ async def test_wallet_blockchain( assert peak_block is not None assert peak_block.height == 505 - header_blocks: List[HeaderBlock] = [] + header_blocks: list[HeaderBlock] = [] for block in default_1000_blocks: header_block = get_block_header(block, [], []) header_blocks.append(header_block) diff --git a/chia/_tests/wallet/test_wallet_coin_store.py b/chia/_tests/wallet/test_wallet_coin_store.py index c51e0acfcece..a71e0b53e110 100644 --- a/chia/_tests/wallet/test_wallet_coin_store.py +++ b/chia/_tests/wallet/test_wallet_coin_store.py @@ -2,7 +2,7 @@ import random from dataclasses import dataclass, field, replace -from typing import Dict, List, Optional, Tuple +from typing import Optional import pytest @@ -118,7 +118,7 @@ def get_dummy_record(wallet_id: int, seeded_random: random.Random) -> WalletCoin @dataclass class DummyWalletCoinRecords: seeded_random: random.Random - records_per_wallet: Dict[int, List[WalletCoinRecord]] = field(default_factory=dict) + records_per_wallet: dict[int, list[WalletCoinRecord]] = field(default_factory=dict) def generate(self, wallet_id: int, count: int) -> None: records = self.records_per_wallet.setdefault(wallet_id, []) @@ -367,7 +367,7 @@ async def test_delete_coin_record() -> None: ).records == [record_2, record_3, record_4, record_5, record_6, record_7] -get_coin_records_offset_limit_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_offset_limit_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(offset=uint32(0), limit=uint32(0)), []), (GetCoinRecords(offset=uint32(10), limit=uint32(0)), []), (GetCoinRecords(offset=uint32(0), limit=uint32(1)), [record_8]), @@ -379,7 +379,7 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(coin_type=uint8(CoinType.CLAWBACK), offset=uint32(5), limit=uint32(1)), []), ] 
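The offset/limit cases collected above reduce to slice semantics over the ordered result set, which is why a zero limit always produces an empty page regardless of offset. A sketch with string placeholders standing in for coin records:

def paginate(records: list[str], offset: int, limit: int) -> list[str]:
    # offset skips rows, limit caps the page; Python slicing clamps out-of-range values.
    return records[offset : offset + limit]

rows = ["record_8", "record_9", "record_1", "record_2"]
assert paginate(rows, 0, 0) == []
assert paginate(rows, 10, 0) == []
assert paginate(rows, 0, 1) == ["record_8"]
assert paginate(rows, 0, 2) == ["record_8", "record_9"]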
-get_coin_records_wallet_id_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_wallet_id_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ ( GetCoinRecords(), [record_8, record_9, record_1, record_2, record_3, record_4, record_5, record_6, record_7], @@ -389,13 +389,13 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(wallet_id=uint32(2)), [record_9, record_6, record_7]), ] -get_coin_records_wallet_type_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_wallet_type_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(wallet_id=uint32(2), wallet_type=uint8(WalletType.STANDARD_WALLET)), [record_9, record_6]), (GetCoinRecords(wallet_type=uint8(WalletType.POOLING_WALLET)), [record_7]), (GetCoinRecords(wallet_type=uint8(WalletType.NFT)), []), ] -get_coin_records_coin_type_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_coin_type_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(wallet_id=uint32(0), coin_type=uint8(CoinType.NORMAL)), [record_1, record_2, record_3, record_4]), (GetCoinRecords(wallet_id=uint32(0), coin_type=uint8(CoinType.CLAWBACK)), []), (GetCoinRecords(wallet_id=uint32(1), coin_type=uint8(CoinType.NORMAL)), [record_5]), @@ -403,7 +403,7 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(coin_type=uint8(CoinType.CLAWBACK)), [record_8, record_9]), ] -get_coin_records_coin_id_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_coin_id_filter_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(coin_id_filter=HashFilter.include([])), []), (GetCoinRecords(coin_id_filter=HashFilter.include([coin_1.name(), coin_4.name()])), [record_1, record_4]), (GetCoinRecords(coin_id_filter=HashFilter.include([coin_1.name(), coin_4.puzzle_hash])), [record_1]), @@ -416,7 +416,7 @@ async def test_delete_coin_record() -> None: ] -get_coin_records_puzzle_hash_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_puzzle_hash_filter_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(puzzle_hash_filter=HashFilter.include([])), []), ( GetCoinRecords(puzzle_hash_filter=HashFilter.include([coin_1.puzzle_hash, coin_4.puzzle_hash])), @@ -439,7 +439,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_parent_coin_id_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_parent_coin_id_filter_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(parent_coin_id_filter=HashFilter.include([])), []), ( GetCoinRecords(parent_coin_id_filter=HashFilter.include([coin_5.name(), coin_4.parent_coin_info])), @@ -463,7 +463,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_amount_filter_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_amount_filter_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(amount_filter=AmountFilter.include([])), []), ( GetCoinRecords(amount_filter=AmountFilter.include([uint64(12312)])), @@ -491,7 +491,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_amount_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_amount_range_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(amount_range=UInt64Range(start=uint64(1000000))), []), 
(GetCoinRecords(amount_range=UInt64Range(stop=uint64(0))), []), ( @@ -504,7 +504,7 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(amount_range=UInt64Range(start=uint64(5), stop=uint64(12311))), [record_2]), ] -get_coin_records_confirmed_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_confirmed_range_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(confirmed_range=UInt32Range(start=uint32(20))), []), (GetCoinRecords(confirmed_range=UInt32Range(stop=uint32(0))), []), (GetCoinRecords(confirmed_range=UInt32Range(start=uint32(2), stop=uint32(1))), []), @@ -521,7 +521,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_spent_range_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_spent_range_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ (GetCoinRecords(spent_range=UInt32Range(start=uint32(20))), []), (GetCoinRecords(spent_range=UInt32Range(stop=uint32(0))), [record_8, record_1, record_2, record_5, record_7]), (GetCoinRecords(spent_range=UInt32Range(start=uint32(2), stop=uint32(1))), []), @@ -530,7 +530,7 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(spent_range=UInt32Range(start=uint32(5), stop=uint32(15))), [record_3, record_4, record_6]), ] -get_coin_records_order_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_order_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ ( GetCoinRecords(wallet_id=uint32(0), order=uint8(CoinRecordOrder.spent_height)), [record_1, record_2, record_3, record_4], @@ -544,7 +544,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_reverse_tests: List[Tuple[GetCoinRecords, List[WalletCoinRecord]]] = [ +get_coin_records_reverse_tests: list[tuple[GetCoinRecords, list[WalletCoinRecord]]] = [ ( GetCoinRecords(wallet_id=uint32(0), order=uint8(CoinRecordOrder.spent_height), reverse=True), [record_4, record_3, record_1, record_2], @@ -567,7 +567,7 @@ async def test_delete_coin_record() -> None: ), ] -get_coin_records_include_total_count_tests: List[Tuple[GetCoinRecords, int, List[WalletCoinRecord]]] = [ +get_coin_records_include_total_count_tests: list[tuple[GetCoinRecords, int, list[WalletCoinRecord]]] = [ (GetCoinRecords(wallet_id=uint32(0), include_total_count=True), 4, [record_1, record_2, record_3, record_4]), ( GetCoinRecords(wallet_id=uint32(0), offset=uint32(1), limit=uint32(2), include_total_count=True), @@ -579,7 +579,7 @@ async def test_delete_coin_record() -> None: (GetCoinRecords(wallet_type=uint8(WalletType.POOLING_WALLET), include_total_count=True), 1, [record_7]), ] -get_coin_records_mixed_tests: List[Tuple[GetCoinRecords, int, List[WalletCoinRecord]]] = [ +get_coin_records_mixed_tests: list[tuple[GetCoinRecords, int, list[WalletCoinRecord]]] = [ ( GetCoinRecords( offset=uint32(2), @@ -632,7 +632,7 @@ async def test_delete_coin_record() -> None: async def run_get_coin_records_test( - request: GetCoinRecords, total_count: Optional[int], coin_records: List[WalletCoinRecord] + request: GetCoinRecords, total_count: Optional[int], coin_records: list[WalletCoinRecord] ) -> None: async with DBConnection(1) as db_wrapper: store = await WalletCoinStore.create(db_wrapper) @@ -665,38 +665,38 @@ async def run_get_coin_records_test( @pytest.mark.parametrize("coins_request, records", [*get_coin_records_offset_limit_tests]) @pytest.mark.anyio -async def test_get_coin_records_offset_limit(coins_request: GetCoinRecords, records: 
List[WalletCoinRecord]) -> None: +async def test_get_coin_records_offset_limit(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_wallet_id_tests]) @pytest.mark.anyio -async def test_get_coin_records_wallet_id(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_wallet_id(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_wallet_type_tests]) @pytest.mark.anyio -async def test_get_coin_records_wallet_type(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_wallet_type(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_coin_type_tests]) @pytest.mark.anyio -async def test_get_coin_records_coin_type(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_coin_type(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_coin_id_filter_tests]) @pytest.mark.anyio -async def test_get_coin_records_coin_id_filter(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_coin_id_filter(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_puzzle_hash_filter_tests]) @pytest.mark.anyio async def test_get_coin_records_puzzle_hash_filter( - coins_request: GetCoinRecords, records: List[WalletCoinRecord] + coins_request: GetCoinRecords, records: list[WalletCoinRecord] ) -> None: await run_get_coin_records_test(coins_request, None, records) @@ -704,51 +704,51 @@ async def test_get_coin_records_puzzle_hash_filter( @pytest.mark.parametrize("coins_request, records", [*get_coin_records_parent_coin_id_filter_tests]) @pytest.mark.anyio async def test_get_coin_records_parent_coin_id_filter( - coins_request: GetCoinRecords, records: List[WalletCoinRecord] + coins_request: GetCoinRecords, records: list[WalletCoinRecord] ) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_amount_filter_tests]) @pytest.mark.anyio -async def test_get_coin_records_amount_filter(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_amount_filter(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_confirmed_range_tests]) @pytest.mark.anyio -async def test_get_coin_records_confirmed_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_confirmed_range(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", 
[*get_coin_records_spent_range_tests]) @pytest.mark.anyio -async def test_get_coin_records_spent_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_spent_range(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_amount_range_tests]) @pytest.mark.anyio -async def test_get_coin_records_amount_range(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_amount_range(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_order_tests]) @pytest.mark.anyio -async def test_get_coin_records_order(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_order(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, records", [*get_coin_records_reverse_tests]) @pytest.mark.anyio -async def test_get_coin_records_reverse(coins_request: GetCoinRecords, records: List[WalletCoinRecord]) -> None: +async def test_get_coin_records_reverse(coins_request: GetCoinRecords, records: list[WalletCoinRecord]) -> None: await run_get_coin_records_test(coins_request, None, records) @pytest.mark.parametrize("coins_request, total_count, records", [*get_coin_records_include_total_count_tests]) @pytest.mark.anyio async def test_get_coin_records_total_count( - coins_request: GetCoinRecords, total_count: int, records: List[WalletCoinRecord] + coins_request: GetCoinRecords, total_count: int, records: list[WalletCoinRecord] ) -> None: await run_get_coin_records_test(coins_request, total_count, records) @@ -756,7 +756,7 @@ async def test_get_coin_records_total_count( @pytest.mark.parametrize("coins_request, total_count, records", [*get_coin_records_mixed_tests]) @pytest.mark.anyio async def test_get_coin_records_mixed( - coins_request: GetCoinRecords, total_count: int, records: List[WalletCoinRecord] + coins_request: GetCoinRecords, total_count: int, records: list[WalletCoinRecord] ) -> None: await run_get_coin_records_test(coins_request, total_count, records) diff --git a/chia/_tests/wallet/test_wallet_node.py b/chia/_tests/wallet/test_wallet_node.py index d45050cc497c..16f3779f94e5 100644 --- a/chia/_tests/wallet/test_wallet_node.py +++ b/chia/_tests/wallet/test_wallet_node.py @@ -5,7 +5,7 @@ import time import types from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional import pytest from chia_rs import G1Element, PrivateKey @@ -112,7 +112,7 @@ async def test_get_private_key_missing_key_use_default( async def test_get_public_key(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None: root_path: Path = root_path_populated_with_config keychain: Keychain = get_temp_keyring - config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet") + config: dict[str, Any] = load_config(root_path, "config.yaml", "wallet") node: WalletNode = WalletNode(config, root_path, test_constants, keychain) pk: G1Element = keychain.add_key( "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -132,7 +132,7 @@ async def 
test_get_public_key(root_path_populated_with_config: Path, get_temp_ke async def test_get_public_key_default_key(root_path_populated_with_config: Path, get_temp_keyring: Keychain) -> None: root_path: Path = root_path_populated_with_config keychain: Keychain = get_temp_keyring - config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet") + config: dict[str, Any] = load_config(root_path, "config.yaml", "wallet") node: WalletNode = WalletNode(config, root_path, test_constants, keychain) pk: G1Element = keychain.add_key( "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -168,7 +168,7 @@ async def test_get_public_key_missing_key( ) -> None: root_path: Path = root_path_populated_with_config keychain: Keychain = get_temp_keyring # empty keyring - config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet") + config: dict[str, Any] = load_config(root_path, "config.yaml", "wallet") node: WalletNode = WalletNode(config, root_path, test_constants, keychain) # Keyring is empty, so requesting a key by fingerprint or None should return None @@ -183,7 +183,7 @@ async def test_get_public_key_missing_key_use_default( ) -> None: root_path: Path = root_path_populated_with_config keychain: Keychain = get_temp_keyring - config: Dict[str, Any] = load_config(root_path, "config.yaml", "wallet") + config: dict[str, Any] = load_config(root_path, "config.yaml", "wallet") node: WalletNode = WalletNode(config, root_path, test_constants, keychain) pk: G1Element = keychain.add_key( "c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", @@ -493,7 +493,7 @@ async def test_unique_puzzle_hash_subscriptions(simulator_and_wallet: OldSimulat @pytest.mark.anyio @pytest.mark.standard_block_tools async def test_get_balance( - simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: list[FullBlock] ) -> None: [full_node_api], [(wallet_node, wallet_server)], bt = simulator_and_wallet full_node_server = full_node_api.full_node.server @@ -722,7 +722,7 @@ async def validate_received_state_from_peer(*args: Any) -> bool: @pytest.mark.anyio @pytest.mark.standard_block_tools async def test_start_with_multiple_key_types( - simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: list[FullBlock] ) -> None: [full_node_api], [(wallet_node, wallet_server)], bt = simulator_and_wallet @@ -753,7 +753,7 @@ async def restart_with_fingerprint(fingerprint: Optional[int]) -> None: @pytest.mark.anyio @pytest.mark.standard_block_tools async def test_start_with_multiple_keys( - simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: List[FullBlock] + simulator_and_wallet: OldSimulatorsAndWallets, self_hostname: str, default_400_blocks: list[FullBlock] ) -> None: [full_node_api], [(wallet_node, wallet_server)], bt = simulator_and_wallet diff --git a/chia/_tests/wallet/test_wallet_retry.py b/chia/_tests/wallet/test_wallet_retry.py index 6f6e90ed161e..e9f7acc8702a 100644 --- a/chia/_tests/wallet/test_wallet_retry.py +++ b/chia/_tests/wallet/test_wallet_retry.py @@ -1,7 +1,7 @@ from __future__ import annotations import asyncio -from typing import Any, List, Tuple +from typing import Any import pytest @@ -36,7 +36,7 @@ def 
evict_from_pool(node: FullNodeAPI, sb: WalletSpendBundle) -> None: @pytest.mark.anyio async def test_wallet_tx_retry( - setup_two_nodes_and_wallet_fast_retry: Tuple[List[FullNodeSimulator], List[Tuple[Any, Any]], BlockTools], + setup_two_nodes_and_wallet_fast_retry: tuple[list[FullNodeSimulator], list[tuple[Any, Any]], BlockTools], self_hostname: str, ) -> None: wait_secs = 20 diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 5b35023e41ed..e0df7b793325 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import AsyncIterator from contextlib import asynccontextmanager -from typing import AsyncIterator, List import pytest from chia_rs import G2Element @@ -139,7 +139,7 @@ async def test_commit_transactions_to_db(wallet_environments: WalletTestFramewor created_txs = action_scope.side_effects.transactions - def flatten_spend_bundles(txs: List[TransactionRecord]) -> List[WalletSpendBundle]: + def flatten_spend_bundles(txs: list[TransactionRecord]) -> list[WalletSpendBundle]: return [tx.spend_bundle for tx in txs if tx.spend_bundle is not None] assert ( diff --git a/chia/_tests/wallet/test_wallet_utils.py b/chia/_tests/wallet/test_wallet_utils.py index b83c6e71b699..5e09ac9e6b26 100644 --- a/chia/_tests/wallet/test_wallet_utils.py +++ b/chia/_tests/wallet/test_wallet_utils.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Collection, Dict, List, Optional, Set, Tuple +from collections.abc import Collection +from typing import Optional import pytest from chia_rs import Coin, CoinState @@ -22,7 +23,7 @@ ] -def assert_race_cache(cache: PeerRequestCache, expected_entries: Dict[int, Set[CoinState]]) -> None: +def assert_race_cache(cache: PeerRequestCache, expected_entries: dict[int, set[CoinState]]) -> None: for i in range(100): if i in expected_entries: assert cache.get_race_cache(i) == expected_entries[i], f"failed for {i}" @@ -39,7 +40,7 @@ def dummy_coin_state(*, created_height: Optional[int], spent_height: Optional[in ) -def heights(coin_states: Collection[CoinState]) -> List[Tuple[Optional[int], Optional[int]]]: +def heights(coin_states: Collection[CoinState]) -> list[tuple[Optional[int], Optional[int]]]: return [(coin_state.created_height, coin_state.spent_height) for coin_state in coin_states] @@ -67,7 +68,7 @@ def test_sort_coin_states() -> None: def test_add_states_to_race_cache() -> None: cache = PeerRequestCache() - expected_entries: Dict[int, Set[CoinState]] = {} + expected_entries: dict[int, set[CoinState]] = {} assert_race_cache(cache, expected_entries) # Repeated adding of the same coin state should not have any impact diff --git a/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py b/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py index 277570fb4040..4bb7aa6dfc4f 100644 --- a/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +++ b/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -20,7 +20,7 @@ def test_cat_outer_puzzle() -> None: - authorized_providers: List[bytes32] = [bytes32([0] * 32), bytes32([0] * 32)] + authorized_providers: list[bytes32] = [bytes32([0] * 32), bytes32([0] * 32)] proofs_checker: Program = Program.to(None) 
ACS: Program = Program.to(1) cr_puzzle: Program = construct_cr_layer(authorized_providers, proofs_checker, ACS) diff --git a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py index 85cef36c6b6b..aaf29a6f3abe 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py @@ -1,7 +1,7 @@ from __future__ import annotations import itertools -from typing import List, Optional, Tuple +from typing import Optional import pytest from chia_rs import G2Element @@ -105,7 +105,7 @@ async def test_covenant_layer(cost_logger: CostLogger) -> None: ].coin # With the honest coin, attempt to spend the non-eve case too soon - result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( + result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( WalletSpendBundle( [ make_spend( @@ -213,7 +213,7 @@ async def test_did_tp(cost_logger: CostLogger) -> None: bad_data: bytes32 = bytes32([0] * 32) # Try to update metadata and tp without any announcement - result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( + result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( WalletSpendBundle( [ make_spend( @@ -327,7 +327,7 @@ async def test_viral_backdoor(cost_logger: CostLogger) -> None: )[0].coin # Reveal the wrong puzzle - result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( + result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( WalletSpendBundle( [ make_spend( @@ -399,7 +399,7 @@ async def test_viral_backdoor(cost_logger: CostLogger) -> None: @pytest.mark.parametrize("num_proofs", range(1, 6)) async def test_proofs_checker(cost_logger: CostLogger, num_proofs: int) -> None: async with sim_and_client() as (sim, client): - flags: List[str] = [str(i) for i in range(0, num_proofs)] + flags: list[str] = [str(i) for i in range(0, num_proofs)] proofs_checker: ProofsChecker = ProofsChecker(flags) # (mod (PROOFS_CHECKER proofs) (if (a PROOFS_CHECKER (list proofs)) () (x))) @@ -415,7 +415,7 @@ async def test_proofs_checker(cost_logger: CostLogger, num_proofs: int) -> None: block_height: uint32 = sim.block_height for i, proof_list in enumerate(itertools.permutations(flags, num_proofs)): - result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( + result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( cost_logger.add_cost( f"Proofs Checker only - num_proofs: {num_proofs} - permutation: {i}", WalletSpendBundle( @@ -506,7 +506,7 @@ async def test_vc_lifecycle(test_syncing: bool, cost_logger: CostLogger) -> None ACS_PH, [bytes32([0] * 32)], ) - result: Tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( + result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( cost_logger.add_cost( "Launch VC", WalletSpendBundle( @@ -612,7 +612,7 @@ async def test_vc_lifecycle(test_syncing: bool, cost_logger: CostLogger) -> None else: proofs = ["test", "test2"] proofs_checker: ProofsChecker = ProofsChecker(proofs) - AUTHORIZED_PROVIDERS: List[bytes32] = [launcher_id] + AUTHORIZED_PROVIDERS: list[bytes32] = [launcher_id] dpuz_1, launch_crcat_spend_1, cr_1 = CRCAT.launch( cr_coin_1, Payment(ACS_PH, uint64(cr_coin_1.amount), []), diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 659bb9998715..893ef54cfc67 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ 
b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -1,7 +1,8 @@ from __future__ import annotations import dataclasses -from typing import Any, Awaitable, Callable, List, Optional +from collections.abc import Awaitable +from typing import Any, Callable, Optional import pytest from chia_rs import G2Element @@ -39,7 +40,7 @@ async def mint_cr_cat( wallet_node_0: WalletNode, client_0: WalletRpcClient, full_node_api: FullNodeSimulator, - authorized_providers: List[bytes32] = [], + authorized_providers: list[bytes32] = [], tail: Program = Program.to(None), proofs_checker: ProofsChecker = ProofsChecker(["foo", "bar"]), ) -> None: diff --git a/chia/_tests/wallet/wallet_block_tools.py b/chia/_tests/wallet/wallet_block_tools.py index 3c64c764b2db..2fded0a56b40 100644 --- a/chia/_tests/wallet/wallet_block_tools.py +++ b/chia/_tests/wallet/wallet_block_tools.py @@ -1,7 +1,7 @@ from __future__ import annotations import time -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia_rs import G1Element, G2Element, compute_merkle_set_root from chiabip158 import PyBIP158 @@ -50,14 +50,14 @@ class WalletBlockTools(BlockTools): def get_consecutive_blocks( self, num_blocks: int, - block_list_input: Optional[List[FullBlock]] = None, + block_list_input: Optional[list[FullBlock]] = None, *, farmer_reward_puzzle_hash: Optional[bytes32] = None, pool_reward_puzzle_hash: Optional[bytes32] = None, transaction_data: Optional[SpendBundle] = None, genesis_timestamp: Optional[uint64] = None, **kwargs: Any, # We're overriding so there's many arguments no longer used. - ) -> List[FullBlock]: + ) -> list[FullBlock]: assert num_blocks > 0 constants = self.constants @@ -67,7 +67,7 @@ def get_consecutive_blocks( if block_list_input is None: block_list_input = [] - blocks: Dict[bytes32, BlockRecord] + blocks: dict[bytes32, BlockRecord] if len(block_list_input) == 0: height_to_hash = {} blocks = {} @@ -130,10 +130,10 @@ def get_consecutive_blocks( def load_block_list( - block_list: List[FullBlock], constants: ConsensusConstants -) -> Tuple[Dict[uint32, bytes32], uint64, Dict[bytes32, BlockRecord]]: - height_to_hash: Dict[uint32, bytes32] = {} - blocks: Dict[bytes32, BlockRecord] = {} + block_list: list[FullBlock], constants: ConsensusConstants +) -> tuple[dict[uint32, bytes32], uint64, dict[bytes32, BlockRecord]]: + height_to_hash: dict[uint32, bytes32] = {} + blocks: dict[bytes32, BlockRecord] = {} sub_slot_iters = constants.SUB_SLOT_ITERS_STARTING for full_block in block_list: if full_block.height != 0 and len(full_block.finished_sub_slots) > 0: @@ -154,8 +154,8 @@ def finish_block( constants: ConsensusConstants, unfinished_block: UnfinishedBlock, prev_block: Optional[BlockRecord], - blocks: Dict[bytes32, BlockRecord], -) -> Tuple[FullBlock, BlockRecord]: + blocks: dict[bytes32, BlockRecord], +) -> tuple[FullBlock, BlockRecord]: if prev_block is None: new_weight = uint128(1) new_height = uint32(0) @@ -199,15 +199,15 @@ def finish_block( def get_full_block_and_block_record( constants: ConsensusConstants, - blocks: Dict[bytes32, BlockRecord], + blocks: dict[bytes32, BlockRecord], last_timestamp: uint64, farmer_reward_puzzlehash: bytes32, pool_target: PoolTarget, prev_block: Optional[BlockRecord], block_generator: Optional[BlockGenerator], - additions: List[Coin], - removals: List[Coin], -) -> Tuple[FullBlock, BlockRecord, float]: + additions: list[Coin], + removals: list[Coin], +) -> tuple[FullBlock, BlockRecord, float]: timestamp = last_timestamp + 20 if prev_block is None: height: uint32 
= uint32(0) @@ -241,9 +241,9 @@ def get_full_block_and_block_record( ) ) - byte_array_tx: List[bytearray] = [] - removal_ids: List[bytes32] = [] - puzzlehash_coin_map: Dict[bytes32, List[bytes32]] = {} + byte_array_tx: list[bytearray] = [] + removal_ids: list[bytes32] = [] + puzzlehash_coin_map: dict[bytes32, list[bytes32]] = {} for coin in additions: puzzlehash_coin_map.setdefault(coin.puzzle_hash, []) puzzlehash_coin_map[coin.puzzle_hash].append(coin.name()) @@ -255,7 +255,7 @@ def get_full_block_and_block_record( bip158: PyBIP158 = PyBIP158(byte_array_tx) filter_hash = std_hash(bytes(bip158.GetEncoded())) - additions_merkle_items: List[bytes32] = [] + additions_merkle_items: list[bytes32] = [] for puzzle, coin_ids in puzzlehash_coin_map.items(): additions_merkle_items.append(puzzle) additions_merkle_items.append(hash_coin_ids(coin_ids)) diff --git a/chia/_tests/weight_proof/test_weight_proof.py b/chia/_tests/weight_proof/test_weight_proof.py index 86a51e4fddb5..e17232e3180d 100644 --- a/chia/_tests/weight_proof/test_weight_proof.py +++ b/chia/_tests/weight_proof/test_weight_proof.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, List, Optional, Tuple +from typing import Optional import pytest @@ -22,14 +22,14 @@ async def load_blocks_dont_validate( - blocks: List[FullBlock], constants: ConsensusConstants -) -> Tuple[ - Dict[bytes32, HeaderBlock], Dict[uint32, bytes32], Dict[bytes32, BlockRecord], Dict[uint32, SubEpochSummary] + blocks: list[FullBlock], constants: ConsensusConstants +) -> tuple[ + dict[bytes32, HeaderBlock], dict[uint32, bytes32], dict[bytes32, BlockRecord], dict[uint32, SubEpochSummary] ]: - header_cache: Dict[bytes32, HeaderBlock] = {} - height_to_hash: Dict[uint32, bytes32] = {} - sub_blocks: Dict[bytes32, BlockRecord] = {} - sub_epoch_summaries: Dict[uint32, SubEpochSummary] = {} + header_cache: dict[bytes32, HeaderBlock] = {} + height_to_hash: dict[uint32, bytes32] = {} + sub_blocks: dict[bytes32, BlockRecord] = {} + sub_epoch_summaries: dict[uint32, SubEpochSummary] = {} prev_block = None difficulty = constants.DIFFICULTY_STARTING sub_slot_iters = constants.SUB_SLOT_ITERS_STARTING @@ -82,15 +82,15 @@ async def load_blocks_dont_validate( async def _test_map_summaries( - blocks: List[FullBlock], - header_cache: Dict[bytes32, HeaderBlock], - height_to_hash: Dict[uint32, bytes32], - sub_blocks: Dict[bytes32, BlockRecord], - summaries: Dict[uint32, SubEpochSummary], + blocks: list[FullBlock], + header_cache: dict[bytes32, HeaderBlock], + height_to_hash: dict[uint32, bytes32], + sub_blocks: dict[bytes32, BlockRecord], + summaries: dict[uint32, SubEpochSummary], constants: ConsensusConstants, ) -> None: curr = sub_blocks[blocks[-1].header_hash] - orig_summaries: Dict[int, SubEpochSummary] = {} + orig_summaries: dict[int, SubEpochSummary] = {} while curr.height > 0: if curr.sub_epoch_summary_included is not None: orig_summaries[curr.height] = curr.sub_epoch_summary_included @@ -114,7 +114,7 @@ async def _test_map_summaries( class TestWeightProof: @pytest.mark.anyio async def test_weight_proof_map_summaries_1( - self, default_400_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_400_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( default_400_blocks, blockchain_constants @@ -125,7 +125,7 @@ async def test_weight_proof_map_summaries_1( @pytest.mark.anyio async def test_weight_proof_map_summaries_2( - 
self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( default_1000_blocks, blockchain_constants @@ -136,7 +136,7 @@ async def test_weight_proof_map_summaries_2( @pytest.mark.anyio async def test_weight_proof_summaries_1000_blocks( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -158,7 +158,7 @@ async def test_weight_proof_summaries_1000_blocks( @pytest.mark.anyio async def test_weight_proof_bad_peak_hash( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -173,7 +173,7 @@ async def test_weight_proof_bad_peak_hash( @pytest.mark.anyio @pytest.mark.skip(reason="broken") async def test_weight_proof_from_genesis( - self, default_400_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_400_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_400_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -188,7 +188,7 @@ async def test_weight_proof_from_genesis( assert wp is not None @pytest.mark.anyio - async def test_weight_proof_edge_cases(self, bt: BlockTools, default_400_blocks: List[FullBlock]) -> None: + async def test_weight_proof_edge_cases(self, bt: BlockTools, default_400_blocks: list[FullBlock]) -> None: blocks = default_400_blocks blocks = bt.get_consecutive_blocks( @@ -275,7 +275,7 @@ async def test_weight_proof_edge_cases(self, bt: BlockTools, default_400_blocks: @pytest.mark.anyio async def test_weight_proof1000( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -294,7 +294,7 @@ async def test_weight_proof1000( @pytest.mark.anyio async def test_weight_proof1000_pre_genesis_empty_slots( - self, pre_genesis_empty_slots_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, pre_genesis_empty_slots_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = pre_genesis_empty_slots_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -314,7 +314,7 @@ async def test_weight_proof1000_pre_genesis_empty_slots( @pytest.mark.anyio async def test_weight_proof10000__blocks_compact( - self, default_10000_blocks_compact: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_10000_blocks_compact: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_10000_blocks_compact header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -333,7 +333,7 @@ async def test_weight_proof10000__blocks_compact( @pytest.mark.anyio async def 
test_weight_proof1000_partial_blocks_compact( - self, bt: BlockTools, default_10000_blocks_compact: List[FullBlock] + self, bt: BlockTools, default_10000_blocks_compact: list[FullBlock] ) -> None: blocks = bt.get_consecutive_blocks( 100, @@ -355,7 +355,7 @@ async def test_weight_proof1000_partial_blocks_compact( @pytest.mark.anyio async def test_weight_proof10000( - self, default_10000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_10000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_10000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -375,7 +375,7 @@ async def test_weight_proof10000( @pytest.mark.anyio async def test_check_num_of_samples( - self, default_10000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_10000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_10000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -396,7 +396,7 @@ async def test_check_num_of_samples( @pytest.mark.anyio async def test_weight_proof_extend_no_ses( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -424,7 +424,7 @@ async def test_weight_proof_extend_no_ses( @pytest.mark.anyio async def test_weight_proof_extend_new_ses( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( @@ -468,7 +468,7 @@ async def test_weight_proof_extend_new_ses( @pytest.mark.anyio async def test_weight_proof_extend_multiple_ses( - self, default_1000_blocks: List[FullBlock], blockchain_constants: ConsensusConstants + self, default_1000_blocks: list[FullBlock], blockchain_constants: ConsensusConstants ) -> None: blocks = default_1000_blocks header_cache, height_to_hash, sub_blocks, summaries = await load_blocks_dont_validate( diff --git a/chia/clvm/spend_sim.py b/chia/clvm/spend_sim.py index 92457ee0fa58..548d8e594d93 100644 --- a/chia/clvm/spend_sim.py +++ b/chia/clvm/spend_sim.py @@ -3,10 +3,11 @@ import contextlib import json import random +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import dataclass from pathlib import Path -from typing import Any, AsyncIterator, Callable, Dict, List, Optional, Tuple, Type, TypeVar +from typing import Any, Callable, Optional, TypeVar import anyio @@ -51,7 +52,7 @@ @asynccontextmanager async def sim_and_client( db_path: Optional[Path] = None, defaults: ConsensusConstants = DEFAULT_CONSTANTS, pass_prefarm: bool = True -) -> AsyncIterator[Tuple[SpendSim, SimClient]]: +) -> AsyncIterator[tuple[SpendSim, SimClient]]: async with SpendSim.managed(db_path, defaults) as sim: client: SimClient = SimClient(sim) if pass_prefarm: @@ -61,8 +62,8 @@ async def sim_and_client( class CostLogger: def __init__(self) -> None: - self.cost_dict: Dict[str, int] = {} - self.cost_dict_no_puzs: Dict[str, int] = {} + self.cost_dict: dict[str, int] = {} + self.cost_dict_no_puzs: dict[str, int] = {} def add_cost(self, 
descriptor: str, spend_bundle: T_SpendBundle) -> T_SpendBundle: program: BlockGenerator = simple_solution_generator(spend_bundle) @@ -102,7 +103,7 @@ class SimFullBlock(Streamable): @streamable @dataclass(frozen=True) class SimBlockRecord(Streamable): - reward_claims_incorporated: List[Coin] + reward_claims_incorporated: list[Coin] height: uint32 prev_transaction_block_height: uint32 timestamp: uint64 @@ -111,7 +112,7 @@ class SimBlockRecord(Streamable): prev_transaction_block_hash: bytes32 @classmethod - def create(cls: Type[_T_SimBlockRecord], rci: List[Coin], height: uint32, timestamp: uint64) -> _T_SimBlockRecord: + def create(cls: type[_T_SimBlockRecord], rci: list[Coin], height: uint32, timestamp: uint64) -> _T_SimBlockRecord: prev_transaction_block_height = uint32(height - 1 if height > 0 else 0) return cls( rci, @@ -129,8 +130,8 @@ def create(cls: Type[_T_SimBlockRecord], rci: List[Coin], height: uint32, timest class SimStore(Streamable): timestamp: uint64 block_height: uint32 - block_records: List[SimBlockRecord] - blocks: List[SimFullBlock] + block_records: list[SimBlockRecord] + blocks: list[SimFullBlock] _T_SpendSim = TypeVar("_T_SpendSim", bound="SpendSim") @@ -140,8 +141,8 @@ class SpendSim: db_wrapper: DBWrapper2 coin_store: CoinStore mempool_manager: MempoolManager - block_records: List[SimBlockRecord] - blocks: List[SimFullBlock] + block_records: list[SimBlockRecord] + blocks: list[SimFullBlock] timestamp: uint64 block_height: uint32 defaults: ConsensusConstants @@ -150,7 +151,7 @@ class SpendSim: @classmethod @contextlib.asynccontextmanager async def managed( - cls: Type[_T_SpendSim], db_path: Optional[Path] = None, defaults: ConsensusConstants = DEFAULT_CONSTANTS + cls: type[_T_SpendSim], db_path: Optional[Path] = None, defaults: ConsensusConstants = DEFAULT_CONSTANTS ) -> AsyncIterator[_T_SpendSim]: self = cls() if db_path is None: @@ -196,7 +197,7 @@ async def managed( ) await c.close() - async def new_peak(self, spent_coins_ids: Optional[List[bytes32]]) -> None: + async def new_peak(self, spent_coins_ids: Optional[list[bytes32]]) -> None: await self.mempool_manager.new_peak(self.block_records[-1], spent_coins_ids) def new_coin_record(self, coin: Coin, coinbase: bool = False) -> CoinRecord: @@ -208,7 +209,7 @@ def new_coin_record(self, coin: Coin, coinbase: bool = False) -> CoinRecord: self.timestamp, ) - async def all_non_reward_coins(self) -> List[Coin]: + async def all_non_reward_coins(self) -> list[Coin]: coins = set() async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute( @@ -231,7 +232,7 @@ async def farm_block( self, puzzle_hash: bytes32 = bytes32(b"0" * 32), item_inclusion_filter: Optional[Callable[[bytes32], bool]] = None, - ) -> Tuple[List[Coin], List[Coin]]: + ) -> tuple[list[Coin], list[Coin]]: # Fees get calculated fees = uint64(0) for item in self.mempool_manager.mempool.all_items(): @@ -257,8 +258,8 @@ async def farm_block( # Coin store gets updated generator_bundle: Optional[SpendBundle] = None - return_additions: List[Coin] = [] - return_removals: List[Coin] = [] + return_additions: list[Coin] = [] + return_removals: list[Coin] = [] spent_coins_ids = None if (len(self.block_records) > 0) and (self.mempool_manager.mempool.size() > 0): peak = self.mempool_manager.peak @@ -274,7 +275,7 @@ async def farm_block( generator_bundle = bundle for spend in generator_bundle.coin_spends: hint_dict, _ = compute_spend_hints_and_additions(spend) - hints: List[Tuple[bytes32, bytes]] = [] + hints: list[tuple[bytes32, bytes]] = [] 
hint_obj: HintedCoin for coin_name, hint_obj in hint_dict.items(): if hint_obj.hint is not None: @@ -334,7 +335,7 @@ class SimClient: def __init__(self, service: SpendSim) -> None: self.service = service - async def push_tx(self, spend_bundle: SpendBundle) -> Tuple[MempoolInclusionStatus, Optional[Err]]: + async def push_tx(self, spend_bundle: SpendBundle) -> tuple[MempoolInclusionStatus, Optional[Err]]: try: spend_bundle_id = spend_bundle.name() sbc = await self.service.mempool_manager.pre_validate_spendbundle(spend_bundle, spend_bundle_id) @@ -351,12 +352,12 @@ async def get_coin_record_by_name(self, name: bytes32) -> Optional[CoinRecord]: async def get_coin_records_by_names( self, - names: List[bytes32], + names: list[bytes32], start_height: Optional[int] = None, end_height: Optional[int] = None, include_spent_coins: bool = False, - ) -> List[CoinRecord]: - kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "names": names} + ) -> list[CoinRecord]: + kwargs: dict[str, Any] = {"include_spent_coins": include_spent_coins, "names": names} if start_height is not None: kwargs["start_height"] = start_height if end_height is not None: @@ -365,12 +366,12 @@ async def get_coin_records_by_names( async def get_coin_records_by_parent_ids( self, - parent_ids: List[bytes32], + parent_ids: list[bytes32], start_height: Optional[int] = None, end_height: Optional[int] = None, include_spent_coins: bool = False, - ) -> List[CoinRecord]: - kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "parent_ids": parent_ids} + ) -> list[CoinRecord]: + kwargs: dict[str, Any] = {"include_spent_coins": include_spent_coins, "parent_ids": parent_ids} if start_height is not None: kwargs["start_height"] = start_height if end_height is not None: @@ -383,8 +384,8 @@ async def get_coin_records_by_puzzle_hash( include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: - kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hash": puzzle_hash} + ) -> list[CoinRecord]: + kwargs: dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hash": puzzle_hash} if start_height is not None: kwargs["start_height"] = start_height if end_height is not None: @@ -393,12 +394,12 @@ async def get_coin_records_by_puzzle_hash( async def get_coin_records_by_puzzle_hashes( self, - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: - kwargs: Dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hashes": puzzle_hashes} + ) -> list[CoinRecord]: + kwargs: dict[str, Any] = {"include_spent_coins": include_spent_coins, "puzzle_hashes": puzzle_hashes} if start_height is not None: kwargs["start_height"] = start_height if end_height is not None: @@ -411,7 +412,7 @@ async def get_block_record_by_height(self, height: uint32) -> SimBlockRecord: async def get_block_record(self, header_hash: bytes32) -> SimBlockRecord: return list(filter(lambda block: block.header_hash == header_hash, self.service.block_records))[0] - async def get_block_records(self, start: uint32, end: uint32) -> List[SimBlockRecord]: + async def get_block_records(self, start: uint32, end: uint32) -> list[SimBlockRecord]: return list(filter(lambda block: (block.height >= start) and (block.height < end), self.service.block_records)) async def get_block(self, header_hash: bytes32) -> SimFullBlock: @@ 
-422,16 +423,16 @@ async def get_block(header_hash: bytes32) -> SimFullBlock:
         block: SimFullBlock = list(filter(lambda block: block.height == block_height, self.service.blocks))[0]
         return block

-    async def get_all_block(self, start: uint32, end: uint32) -> List[SimFullBlock]:
+    async def get_all_block(self, start: uint32, end: uint32) -> list[SimFullBlock]:
         return list(filter(lambda block: (block.height >= start) and (block.height < end), self.service.blocks))

-    async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[CoinRecord], List[CoinRecord]]:
+    async def get_additions_and_removals(self, header_hash: bytes32) -> tuple[list[CoinRecord], list[CoinRecord]]:
         selected_block: SimBlockRecord = list(
             filter(lambda br: br.header_hash == header_hash, self.service.block_records)
         )[0]
         block_height: uint32 = selected_block.height
-        additions: List[CoinRecord] = await self.service.coin_store.get_coins_added_at_height(block_height)
-        removals: List[CoinRecord] = await self.service.coin_store.get_coins_removed_at_height(block_height)
+        additions: list[CoinRecord] = await self.service.coin_store.get_coins_added_at_height(block_height)
+        removals: list[CoinRecord] = await self.service.coin_store.get_coins_removed_at_height(block_height)
         return additions, removals

     async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> CoinSpend:
@@ -443,16 +444,16 @@ async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Coi
         spend_info = get_puzzle_and_solution_for_coin(generator, coin_record.coin, height, self.service.defaults)
         return CoinSpend(coin_record.coin, spend_info.puzzle, spend_info.solution)

-    async def get_all_mempool_tx_ids(self) -> List[bytes32]:
+    async def get_all_mempool_tx_ids(self) -> list[bytes32]:
         return self.service.mempool_manager.mempool.all_item_ids()

-    async def get_all_mempool_items(self) -> Dict[bytes32, MempoolItem]:
+    async def get_all_mempool_items(self) -> dict[bytes32, MempoolItem]:
         spends = {}
         for item in self.service.mempool_manager.mempool.all_items():
             spends[item.name] = item
         return spends

-    async def get_mempool_item_by_tx_id(self, tx_id: bytes32) -> Optional[Dict[str, Any]]:
+    async def get_mempool_item_by_tx_id(self, tx_id: bytes32) -> Optional[dict[str, Any]]:
         item = self.service.mempool_manager.get_mempool_item(tx_id)
         if item is None:
             return None
@@ -465,13 +466,13 @@ async def get_coin_records_by_hint(
         include_spent_coins: bool = True,
         start_height: Optional[int] = None,
         end_height: Optional[int] = None,
-    ) -> List[CoinRecord]:
+    ) -> list[CoinRecord]:
         """
         Retrieves coins by hint, by default returns unspent coins.
""" - names: List[bytes32] = await self.service.hint_store.get_coin_ids(hint) + names: list[bytes32] = await self.service.hint_store.get_coin_ids(hint) - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "include_spent_coins": False, "names": names, } diff --git a/chia/cmds/beta.py b/chia/cmds/beta.py index f96dd5ab6f6f..31c436fa36af 100644 --- a/chia/cmds/beta.py +++ b/chia/cmds/beta.py @@ -3,7 +3,7 @@ import zipfile from datetime import datetime from pathlib import Path -from typing import List, Optional +from typing import Optional import click @@ -150,7 +150,7 @@ def prepare_submission_cmd(ctx: click.Context) -> None: prepare_result / f"submission_{prepare_result.name}__{datetime.now().strftime('%m_%d_%Y__%H_%M_%S')}.zip" ) - def add_files(paths: List[Path]) -> int: + def add_files(paths: list[Path]) -> int: added = 0 for path in paths: if path.name.startswith("."): diff --git a/chia/cmds/beta_funcs.py b/chia/cmds/beta_funcs.py index 6eb086262e64..9dcd358bff29 100644 --- a/chia/cmds/beta_funcs.py +++ b/chia/cmds/beta_funcs.py @@ -3,7 +3,7 @@ import os import sys from pathlib import Path -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Optional from chia.cmds.cmds_util import format_bytes, prompt_yes_no, validate_directory_writable from chia.util.beta_metrics import metrics_log_interval_max, metrics_log_interval_min @@ -15,7 +15,7 @@ def default_beta_root_path() -> Path: return Path(os.path.expanduser(os.getenv("CHIA_BETA_ROOT", "~/chia-beta-test"))).resolve() -def warn_if_beta_enabled(config: Dict[str, Any]) -> None: +def warn_if_beta_enabled(config: dict[str, Any]) -> None: if config.get("beta", {}).get("enabled", False): print("\nWARNING: beta test mode is enabled. Run `chia beta disable` if this is unintentional.\n") @@ -84,7 +84,7 @@ def prompt_for_metrics_log_interval(default_interval: int) -> int: return interval -def update_beta_config(enabled: bool, path: Path, metrics_log_interval: int, config: Dict[str, Any]) -> None: +def update_beta_config(enabled: bool, path: Path, metrics_log_interval: int, config: dict[str, Any]) -> None: if "beta" not in config: config["beta"] = {} @@ -121,7 +121,7 @@ def prepare_chia_blockchain_log(path: Path) -> None: print(f" - {path.name}") -def prepare_logs(prepare_path: Path, prepare_callback: Callable[[Path], None]) -> List[Path]: +def prepare_logs(prepare_path: Path, prepare_callback: Callable[[Path], None]) -> list[Path]: result = [path for path in prepare_path.iterdir()] if prepare_path.exists() else [] if len(result): print(f"\nPreparing {prepare_path.name!r} logs:") diff --git a/chia/cmds/check_wallet_db.py b/chia/cmds/check_wallet_db.py index 5ba145182e95..f0f0a4523407 100644 --- a/chia/cmds/check_wallet_db.py +++ b/chia/cmds/check_wallet_db.py @@ -3,9 +3,10 @@ import asyncio import sys from collections import defaultdict +from collections.abc import Iterable from pathlib import Path from sqlite3 import Row -from typing import Any, Dict, Iterable, List, Optional, Set +from typing import Any, Optional from chia.util.collection import find_duplicates from chia.util.db_synchronous import db_synchronous_on @@ -48,18 +49,18 @@ def _validate_args_addresses_used(wallet_id: int, last_index: int, last_hardened raise ValueError(f"Invalid argument: noncontiguous derivation_index at {last_index} wallet_id={wallet_id}") -def check_addresses_used_contiguous(derivation_paths: List[DerivationPath]) -> List[str]: +def check_addresses_used_contiguous(derivation_paths: list[DerivationPath]) -> list[str]: """ The 
used column for addresses in the derivation_paths table should be a zero or greater run of 1's, followed by a zero or greater run of 0's. There should be no used derivations after seeing a used derivation. """ - errors: List[str] = [] + errors: list[str] = [] for wallet_id, dps in dp_by_wallet_id(derivation_paths).items(): saw_unused = False - bad_used_values: Set[int] = set() - ordering_errors: List[str] = [] + bad_used_values: set[int] = set() + ordering_errors: list[str] = [] # last_index = None # last_hardened = None for dp in dps: @@ -89,7 +90,7 @@ def check_addresses_used_contiguous(derivation_paths: List[DerivationPath]) -> L return errors -def check_for_gaps(array: List[int], start: int, end: int, *, data_type_plural: str = "Elements") -> List[str]: +def check_for_gaps(array: list[int], start: int, end: int, *, data_type_plural: str = "Elements") -> list[str]: """ Check for compact sequence: Check that every value from start to end is present in array, and no more. @@ -100,7 +101,7 @@ def check_for_gaps(array: List[int], start: int, end: int, *, data_type_plural: if start > end: raise ValueError(f"{__name__} called with incorrect arguments: start={start} end={end} (start > end)") - errors: List[str] = [] + errors: list[str] = [] if start == end and len(array) == 1: return errors @@ -123,7 +124,7 @@ def check_for_gaps(array: List[int], start: int, end: int, *, data_type_plural: class FromDB: - def __init__(self, row: Iterable[Any], fields: List[str]) -> None: + def __init__(self, row: Iterable[Any], fields: list[str]) -> None: self.fields = fields for field, value in zip(fields, row): setattr(self, field, value) @@ -144,7 +145,7 @@ def wallet_type_name( return f"INVALID_WALLET_TYPE ({wallet_type})" -def _cwr(row: Row) -> List[Any]: +def _cwr(row: Row) -> list[Any]: r = [] for i, v in enumerate(row): if i == 2: @@ -175,7 +176,7 @@ class Wallet(FromDB): data: str -def dp_by_wallet_id(derivation_paths: List[DerivationPath]) -> Dict[int, List[DerivationPath]]: +def dp_by_wallet_id(derivation_paths: list[DerivationPath]) -> dict[int, list[DerivationPath]]: d = defaultdict(list) for derivation_path in derivation_paths: d[derivation_path.wallet_id].append(derivation_path) @@ -184,7 +185,7 @@ def dp_by_wallet_id(derivation_paths: List[DerivationPath]) -> Dict[int, List[De return d -def derivation_indices_by_wallet_id(derivation_paths: List[DerivationPath]) -> Dict[int, List[int]]: +def derivation_indices_by_wallet_id(derivation_paths: list[DerivationPath]) -> dict[int, list[int]]: d = dp_by_wallet_id(derivation_paths) di = {} for k, v in d.items(): @@ -192,7 +193,7 @@ def derivation_indices_by_wallet_id(derivation_paths: List[DerivationPath]) -> D return di -def print_min_max_derivation_for_wallets(derivation_paths: List[DerivationPath]) -> None: +def print_min_max_derivation_for_wallets(derivation_paths: list[DerivationPath]) -> None: d = derivation_indices_by_wallet_id(derivation_paths) print("Min, Max, Count of derivations for each wallet:") for wallet_id, derivation_index_list in d.items(): @@ -209,7 +210,7 @@ class WalletDBReader: sql_log_path: Optional[Path] = None verbose = False - async def get_all_wallets(self) -> List[Wallet]: + async def get_all_wallets(self) -> list[Wallet]: wallet_fields = ["id", "name", "wallet_type", "data"] async with self.db_wrapper.reader_no_transaction() as reader: # TODO: if table doesn't exist @@ -217,7 +218,7 @@ async def get_all_wallets(self) -> List[Wallet]: rows = await cursor.fetchall() return [Wallet(r, wallet_fields) for r in rows] - async def 
get_derivation_paths(self) -> List[DerivationPath]: + async def get_derivation_paths(self) -> list[DerivationPath]: fields = ["derivation_index", "pubkey", "puzzle_hash", "wallet_type", "wallet_id", "used", "hardened"] async with self.db_wrapper.reader_no_transaction() as reader: # TODO: if table doesn't exist @@ -225,7 +226,7 @@ async def get_derivation_paths(self) -> List[DerivationPath]: rows = await cursor.fetchall() return [DerivationPath(row, fields) for row in rows] - async def show_tables(self) -> List[str]: + async def show_tables(self) -> list[str]: async with self.db_wrapper.reader_no_transaction() as reader: cursor = await reader.execute("""SELECT name FROM sqlite_master WHERE type='table';""") print("\nWallet DB Tables:") @@ -237,7 +238,7 @@ async def show_tables(self) -> List[str]: print() return [] - async def check_wallets(self) -> List[str]: + async def check_wallets(self) -> list[str]: # id, name, wallet_type, data # TODO: Move this SQL up a level async with self.db_wrapper.reader_no_transaction() as reader: @@ -282,8 +283,8 @@ async def check_wallets(self) -> List[str]: return errors def check_wallets_missing_derivations( - self, wallets: List[Wallet], derivation_paths: List[DerivationPath] - ) -> List[str]: + self, wallets: list[Wallet], derivation_paths: list[DerivationPath] + ) -> list[str]: p = [] d = derivation_indices_by_wallet_id(derivation_paths) # TODO: calc this once, pass in for w in wallets: @@ -293,7 +294,7 @@ def check_wallets_missing_derivations( return [f"Wallet IDs with no derivations that require them: {p}"] return [] - def check_derivations_are_compact(self, wallets: List[Wallet], derivation_paths: List[DerivationPath]) -> List[str]: + def check_derivations_are_compact(self, wallets: list[Wallet], derivation_paths: list[DerivationPath]) -> list[str]: errors = [] """ Gaps in derivation index @@ -318,8 +319,8 @@ def check_derivations_are_compact(self, wallets: List[Wallet], derivation_paths: return errors def check_unexpected_derivation_entries( - self, wallets: List[Wallet], derivation_paths: List[DerivationPath] - ) -> List[str]: + self, wallets: list[Wallet], derivation_paths: list[DerivationPath] + ) -> list[str]: """ Check for unexpected derivation path entries diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 663d51e06874..1aedf7ee963d 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -4,22 +4,10 @@ import collections import inspect import sys +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import MISSING, dataclass, field, fields -from typing import ( - Any, - AsyncIterator, - Callable, - Dict, - List, - Optional, - Protocol, - Type, - Union, - get_args, - get_origin, - get_type_hints, -) +from typing import Any, Callable, Optional, Protocol, Union, get_args, get_origin, get_type_hints import click from typing_extensions import dataclass_transform @@ -96,10 +84,10 @@ def convert(self, value: str, param: Optional[click.Parameter], ctx: Optional[cl @dataclass class _CommandParsingStage: - my_dataclass: Type[ChiaCommand] - my_option_decorators: List[Callable[[SyncCmd], SyncCmd]] - my_members: Dict[str, _CommandParsingStage] - my_kwarg_names: List[str] + my_dataclass: type[ChiaCommand] + my_option_decorators: list[Callable[[SyncCmd], SyncCmd]] + my_members: dict[str, _CommandParsingStage] + my_kwarg_names: list[str] _needs_context: bool def needs_context(self) -> bool: @@ -108,14 +96,14 @@ def needs_context(self) -> bool: else: return 
any(member.needs_context() for member in self.my_members.values()) - def get_all_option_decorators(self) -> List[Callable[[SyncCmd], SyncCmd]]: - all_option_decorators: List[Callable[[SyncCmd], SyncCmd]] = self.my_option_decorators + def get_all_option_decorators(self) -> list[Callable[[SyncCmd], SyncCmd]]: + all_option_decorators: list[Callable[[SyncCmd], SyncCmd]] = self.my_option_decorators for member in self.my_members.values(): all_option_decorators.extend(member.get_all_option_decorators()) return all_option_decorators def initialize_instance(self, **kwargs: Any) -> ChiaCommand: - kwargs_to_pass: Dict[str, Any] = {} + kwargs_to_pass: dict[str, Any] = {} for kwarg_name in self.my_kwarg_names: kwargs_to_pass[kwarg_name] = kwargs[kwarg_name] @@ -130,7 +118,7 @@ def apply_decorators(self, cmd: SyncCmd) -> SyncCmd: def strip_click_context(func: SyncCmd) -> SyncCmd: def _inner(ctx: click.Context, **kwargs: Any) -> None: - context: Dict[str, Any] = ctx.obj if ctx.obj is not None else {} + context: dict[str, Any] = ctx.obj if ctx.obj is not None else {} func(context=context, **kwargs) return _inner @@ -152,10 +140,10 @@ def __call__(self, *args: Any, **kwargs: Any) -> None: instance.run() -def _generate_command_parser(cls: Type[ChiaCommand]) -> _CommandParsingStage: - option_decorators: List[Callable[[SyncCmd], SyncCmd]] = [] - kwarg_names: List[str] = [] - members: Dict[str, _CommandParsingStage] = {} +def _generate_command_parser(cls: type[ChiaCommand]) -> _CommandParsingStage: + option_decorators: list[Callable[[SyncCmd], SyncCmd]] = [] + kwarg_names: list[str] = [] + members: dict[str, _CommandParsingStage] = {} needs_context: bool = False hints = get_type_hints(cls) @@ -172,7 +160,7 @@ def _generate_command_parser(cls: Type[ChiaCommand]) -> _CommandParsingStage: needs_context = True kwarg_names.append(field_name) elif "option_args" in _field.metadata: - option_args: Dict[str, Any] = {"multiple": False, "required": False} + option_args: dict[str, Any] = {"multiple": False, "required": False} option_args.update(_field.metadata["option_args"]) if "type" not in option_args: @@ -230,24 +218,24 @@ def _generate_command_parser(cls: Type[ChiaCommand]) -> _CommandParsingStage: ) -def _convert_class_to_function(cls: Type[ChiaCommand]) -> SyncCmd: +def _convert_class_to_function(cls: type[ChiaCommand]) -> SyncCmd: command_parser = _generate_command_parser(cls) return command_parser.apply_decorators(command_parser) @dataclass_transform() -def chia_command(cmd: click.Group, name: str, help: str) -> Callable[[Type[ChiaCommand]], Type[ChiaCommand]]: - def _chia_command(cls: Type[ChiaCommand]) -> Type[ChiaCommand]: +def chia_command(cmd: click.Group, name: str, help: str) -> Callable[[type[ChiaCommand]], type[ChiaCommand]]: + def _chia_command(cls: type[ChiaCommand]) -> type[ChiaCommand]: # The type ignores here are largely due to the fact that the class information is not preserved after being # passed through the dataclass wrapper. Not sure what to do about this right now. 
if sys.version_info < (3, 10): # pragma: no cover # stuff below 3.10 doesn't know about kw_only - wrapped_cls: Type[ChiaCommand] = dataclass( # type: ignore[assignment] + wrapped_cls: type[ChiaCommand] = dataclass( # type: ignore[assignment] frozen=True, )(cls) else: - wrapped_cls: Type[ChiaCommand] = dataclass( # type: ignore[assignment] + wrapped_cls: type[ChiaCommand] = dataclass( # type: ignore[assignment] frozen=True, kw_only=True, )(cls) @@ -259,7 +247,7 @@ def _chia_command(cls: Type[ChiaCommand]) -> Type[ChiaCommand]: @dataclass_transform() -def command_helper(cls: Type[Any]) -> Type[Any]: +def command_helper(cls: type[Any]) -> type[Any]: if sys.version_info < (3, 10): # stuff below 3.10 doesn't support kw_only new_cls = dataclass(frozen=True)(cls) # pragma: no cover else: @@ -268,14 +256,14 @@ def command_helper(cls: Type[Any]) -> Type[Any]: return new_cls -Context = Dict[str, Any] +Context = dict[str, Any] @dataclass(frozen=True) class WalletClientInfo: client: WalletRpcClient fingerprint: int - config: Dict[str, Any] + config: dict[str, Any] @command_helper diff --git a/chia/cmds/cmds_util.py b/chia/cmds/cmds_util.py index a5f9b9dde668..c95c95600375 100644 --- a/chia/cmds/cmds_util.py +++ b/chia/cmds/cmds_util.py @@ -3,9 +3,10 @@ import dataclasses import logging import traceback +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from pathlib import Path -from typing import Any, AsyncIterator, Callable, Dict, List, Optional, Tuple, Type, TypeVar +from typing import Any, Callable, Optional, TypeVar import click from aiohttp import ClientConnectorCertificateError, ClientConnectorError @@ -33,7 +34,7 @@ from chia.wallet.transaction_record import TransactionRecord from chia.wallet.util.tx_config import CoinSelectionConfig, CoinSelectionConfigLoader, TXConfig, TXConfigLoader -NODE_TYPES: Dict[str, Type[RpcClient]] = { +NODE_TYPES: dict[str, type[RpcClient]] = { "base": RpcClient, "farmer": FarmerRpcClient, "wallet": WalletRpcClient, @@ -43,7 +44,7 @@ "simulator": SimulatorFullNodeRpcClient, } -node_config_section_names: Dict[Type[RpcClient], str] = { +node_config_section_names: dict[type[RpcClient], str] = { RpcClient: "base", FarmerRpcClient: "farmer", WalletRpcClient: "wallet", @@ -92,12 +93,12 @@ async def validate_client_connection( @asynccontextmanager async def get_any_service_client( - client_type: Type[_T_RpcClient], + client_type: type[_T_RpcClient], rpc_port: Optional[int] = None, root_path: Optional[Path] = None, consume_errors: bool = True, use_ssl: bool = True, -) -> AsyncIterator[Tuple[_T_RpcClient, Dict[str, Any]]]: +) -> AsyncIterator[tuple[_T_RpcClient, dict[str, Any]]]: """ Yields a tuple with a RpcClient for the applicable node type a dictionary of the node's configuration, and a fingerprint if applicable. 
However, if connecting to the node fails then we will return None for @@ -151,7 +152,7 @@ async def get_any_service_client( async def get_wallet(root_path: Path, wallet_client: WalletRpcClient, fingerprint: Optional[int]) -> int: selected_fingerprint: int keychain_proxy: Optional[KeychainProxy] = None - all_keys: List[KeyData] = [] + all_keys: list[KeyData] = [] try: if fingerprint is not None: @@ -248,7 +249,7 @@ async def get_wallet_client( fingerprint: Optional[int] = None, root_path: Path = DEFAULT_ROOT_PATH, consume_errors: bool = True, -) -> AsyncIterator[Tuple[WalletRpcClient, int, Dict[str, Any]]]: +) -> AsyncIterator[tuple[WalletRpcClient, int, dict[str, Any]]]: async with get_any_service_client(WalletRpcClient, wallet_rpc_port, root_path, consume_errors) as ( wallet_client, config, @@ -351,17 +352,17 @@ def _convert_timelock_args_to_cvt(*args: Any, **kwargs: Any) -> None: @streamable @dataclasses.dataclass(frozen=True) class TransactionBundle(Streamable): - txs: List[TransactionRecord] + txs: list[TransactionRecord] def tx_out_cmd( enable_timelock_args: Optional[bool] = None, -) -> Callable[[Callable[..., List[TransactionRecord]]], Callable[..., None]]: +) -> Callable[[Callable[..., list[TransactionRecord]]], Callable[..., None]]: - def _tx_out_cmd(func: Callable[..., List[TransactionRecord]]) -> Callable[..., None]: + def _tx_out_cmd(func: Callable[..., list[TransactionRecord]]) -> Callable[..., None]: @timelock_args(enable=enable_timelock_args) def original_cmd(transaction_file: Optional[str] = None, **kwargs: Any) -> None: - txs: List[TransactionRecord] = func(**kwargs) + txs: list[TransactionRecord] = func(**kwargs) if transaction_file is not None: print(f"Writing transactions to file {transaction_file}:") with open(Path(transaction_file), "wb") as file: @@ -386,8 +387,8 @@ def original_cmd(transaction_file: Optional[str] = None, **kwargs: Any) -> None: class CMDCoinSelectionConfigLoader: min_coin_amount: CliAmount = cli_amount_none max_coin_amount: CliAmount = cli_amount_none - excluded_coin_amounts: Optional[List[CliAmount]] = None - excluded_coin_ids: Optional[List[bytes32]] = None + excluded_coin_amounts: Optional[list[CliAmount]] = None + excluded_coin_ids: Optional[list[bytes32]] = None def to_coin_selection_config(self, mojo_per_unit: int) -> CoinSelectionConfig: return CoinSelectionConfigLoader( @@ -406,7 +407,7 @@ def to_coin_selection_config(self, mojo_per_unit: int) -> CoinSelectionConfig: class CMDTXConfigLoader(CMDCoinSelectionConfigLoader): reuse_puzhash: Optional[bool] = None - def to_tx_config(self, mojo_per_unit: int, config: Dict[str, Any], fingerprint: int) -> TXConfig: + def to_tx_config(self, mojo_per_unit: int, config: dict[str, Any], fingerprint: int) -> TXConfig: cs_config = self.to_coin_selection_config(mojo_per_unit) return TXConfigLoader( cs_config.min_coin_amount, diff --git a/chia/cmds/coin_funcs.py b/chia/cmds/coin_funcs.py index c6c61f36205c..7a03d6973d8d 100644 --- a/chia/cmds/coin_funcs.py +++ b/chia/cmds/coin_funcs.py @@ -2,7 +2,8 @@ import dataclasses import sys -from typing import List, Optional, Sequence, Tuple +from collections.abc import Sequence +from typing import Optional from chia.cmds.cmds_util import CMDCoinSelectionConfigLoader, CMDTXConfigLoader, cli_confirm, get_wallet_client from chia.cmds.param_types import CliAmount @@ -84,7 +85,7 @@ async def async_list( def print_coins( - target_string: str, coins: List[Tuple[Coin, str]], mojo_per_unit: int, addr_prefix: str, paginate: bool + target_string: str, coins: list[tuple[Coin, 
str]], mojo_per_unit: int, addr_prefix: str, paginate: bool ) -> None: if len(coins) == 0: print("\tNo Coins.") @@ -129,7 +130,7 @@ async def async_combine( push: bool, condition_valid_times: ConditionValidTimes, override: bool, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): try: wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) @@ -207,7 +208,7 @@ async def async_split( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): try: wallet_type = await get_wallet_type(wallet_id=wallet_id, wallet_client=wallet_client) @@ -248,7 +249,7 @@ async def async_split( reuse_puzhash=reuse_puzhash, ).to_tx_config(mojo_per_unit, config, fingerprint) - transactions: List[TransactionRecord] = ( + transactions: list[TransactionRecord] = ( await wallet_client.split_coins( SplitCoins( wallet_id=uint32(wallet_id), diff --git a/chia/cmds/coins.py b/chia/cmds/coins.py index d2c5124d70da..83f192c19d9d 100644 --- a/chia/cmds/coins.py +++ b/chia/cmds/coins.py @@ -1,7 +1,8 @@ from __future__ import annotations import asyncio -from typing import List, Optional, Sequence +from collections.abc import Sequence +from typing import Optional import click @@ -127,7 +128,7 @@ def combine_cmd( push: bool, condition_valid_times: ConditionValidTimes, override: bool, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .coin_funcs import async_combine return asyncio.run( @@ -197,7 +198,7 @@ def split_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .coin_funcs import async_split return asyncio.run( diff --git a/chia/cmds/dao.py b/chia/cmds/dao.py index 84de7bcbe34b..16cfe7ab0833 100644 --- a/chia/cmds/dao.py +++ b/chia/cmds/dao.py @@ -1,7 +1,8 @@ from __future__ import annotations import asyncio -from typing import List, Optional, Sequence +from collections.abc import Sequence +from typing import Optional import click @@ -169,7 +170,7 @@ def dao_create_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import create_dao_wallet if self_destruct == proposal_timelock: @@ -271,7 +272,7 @@ def dao_add_funds_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import add_funds_to_treasury return asyncio.run( @@ -449,7 +450,7 @@ def dao_vote_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import vote_on_proposal is_yes_vote = False if vote_no else True @@ -521,7 +522,7 @@ def dao_close_proposal_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import close_proposal return asyncio.run( @@ -582,7 +583,7 @@ def dao_lockup_coins_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import lockup_coins return asyncio.run( @@ -630,7 +631,7 @@ def 
dao_release_coins_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import release_coins return asyncio.run( @@ -677,7 +678,7 @@ def dao_exit_lockup_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import exit_lockup return asyncio.run( @@ -778,7 +779,7 @@ def dao_create_spend_proposal_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import create_spend_proposal return asyncio.run( @@ -887,7 +888,7 @@ def dao_create_update_proposal_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import create_update_proposal return asyncio.run( @@ -967,7 +968,7 @@ def dao_create_mint_proposal_cmd( reuse: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .dao_funcs import create_mint_proposal return asyncio.run( diff --git a/chia/cmds/dao_funcs.py b/chia/cmds/dao_funcs.py index 947c70abd631..3712965c3aab 100644 --- a/chia/cmds/dao_funcs.py +++ b/chia/cmds/dao_funcs.py @@ -4,7 +4,7 @@ import json import time from decimal import Decimal -from typing import List, Optional +from typing import Optional from chia.cmds.cmds_util import CMDTXConfigLoader, get_wallet_client, transaction_status_msg, transaction_submitted_msg from chia.cmds.param_types import CliAmount @@ -62,7 +62,7 @@ async def create_dao_wallet( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: if proposal_minimum % 2 == 0: proposal_minimum = uint64(1 + proposal_minimum) print("Adding 1 mojo to proposal minimum amount") @@ -131,7 +131,7 @@ async def add_funds_to_treasury( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: typ = await get_wallet_type(wallet_id=funding_wallet_id, wallet_client=wallet_client) @@ -285,7 +285,7 @@ async def vote_on_proposal( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_vote_on_proposal( wallet_id=wallet_id, @@ -322,7 +322,7 @@ async def close_proposal( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_close_proposal( wallet_id=wallet_id, @@ -358,7 +358,7 @@ async def lockup_coins( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: final_amount: uint64 = amount.convert_amount(units["cat"]) async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_send_to_lockup( @@ -393,7 +393,7 @@ async def 
release_coins( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_free_coins_from_finished_proposals( wallet_id=wallet_id, @@ -425,7 +425,7 @@ async def exit_lockup( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_exit_lockup( wallet_id=wallet_id, @@ -464,7 +464,7 @@ async def create_spend_proposal( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: if additions_file is None and (address is None or amount is None): raise ValueError("Must include a json specification or an address / amount pair.") if additions_file: # pragma: no cover @@ -518,7 +518,7 @@ async def create_update_proposal( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: new_dao_rules = { "proposal_timelock": proposal_timelock, "soft_close_length": soft_close_length, @@ -556,7 +556,7 @@ async def create_mint_proposal( cli_tx_config: CMDTXConfigLoader, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.dao_create_proposal( wallet_id=wallet_id, diff --git a/chia/cmds/data.py b/chia/cmds/data.py index 426a0a092256..0547a989350f 100644 --- a/chia/cmds/data.py +++ b/chia/cmds/data.py @@ -2,8 +2,9 @@ import json import logging +from collections.abc import Coroutine, Sequence from pathlib import Path -from typing import Any, Callable, Coroutine, Dict, List, Optional, Sequence, TypeVar, Union +from typing import Any, Callable, Optional, TypeVar, Union import click @@ -20,7 +21,7 @@ # TODO: this is more general and should be part of refactoring the overall CLI code duplication -def run(coro: Coroutine[Any, Any, Optional[Dict[str, Any]]]) -> None: +def run(coro: Coroutine[Any, Any, Optional[dict[str, Any]]]) -> None: import asyncio response = asyncio.run(coro) @@ -344,7 +345,7 @@ def get_root( @options.create_fingerprint() def subscribe( id: bytes32, - urls: List[str], + urls: list[str], data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -360,7 +361,7 @@ def subscribe( @options.create_fingerprint() def remove_subscription( id: bytes32, - urls: List[str], + urls: list[str], data_rpc_port: int, fingerprint: Optional[int], ) -> None: @@ -492,7 +493,7 @@ def add_missing_files( def add_mirror( id: bytes32, amount: int, - urls: List[str], + urls: list[str], fee: Optional[uint64], data_rpc_port: int, fingerprint: Optional[int], @@ -674,7 +675,7 @@ def wallet_log_in( @options.create_fingerprint() def get_proof( id: bytes32, - key_strings: List[str], + key_strings: list[str], data_rpc_port: int, fingerprint: Optional[int], ) -> None: diff --git a/chia/cmds/data_funcs.py b/chia/cmds/data_funcs.py index 9afb586b96f3..70757ca5ac74 100644 --- a/chia/cmds/data_funcs.py +++ b/chia/cmds/data_funcs.py @@ -2,8 +2,9 @@ import contextlib import json +from collections.abc import AsyncIterator from pathlib import Path -from 
typing import Any, AsyncIterator, Dict, List, Optional, Tuple +from typing import Any, Optional from chia.cmds.cmds_util import get_any_service_client from chia.rpc.data_layer_rpc_client import DataLayerRpcClient @@ -15,7 +16,7 @@ @contextlib.asynccontextmanager async def get_client( rpc_port: Optional[int], fingerprint: Optional[int] = None, root_path: Optional[Path] = None -) -> AsyncIterator[Tuple[DataLayerRpcClient, Dict[str, Any]]]: +) -> AsyncIterator[tuple[DataLayerRpcClient, dict[str, Any]]]: async with get_any_service_client( client_type=DataLayerRpcClient, rpc_port=rpc_port, @@ -63,12 +64,12 @@ async def get_value_cmd( async def update_data_store_cmd( rpc_port: Optional[int], store_id: bytes32, - changelist: List[Dict[str, str]], + changelist: list[dict[str, str]], fee: Optional[uint64], fingerprint: Optional[int], submit_on_chain: bool, root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.update_data_store( @@ -84,12 +85,12 @@ async def update_data_store_cmd( async def update_multiple_stores_cmd( rpc_port: Optional[int], - store_updates: List[Dict[str, str]], + store_updates: list[dict[str, str]], fee: Optional[uint64], fingerprint: Optional[int], submit_on_chain: bool, root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): @@ -109,7 +110,7 @@ async def submit_pending_root_cmd( fee: Optional[uint64], fingerprint: Optional[int], root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.submit_pending_root( @@ -126,7 +127,7 @@ async def submit_all_pending_roots_cmd( fee: Optional[uint64], fingerprint: Optional[int], root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.submit_all_pending_roots(fee=fee) @@ -144,7 +145,7 @@ async def get_keys_cmd( page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.get_keys(store_id=store_id, root_hash=root_hash, page=page, max_page_size=max_page_size) @@ -162,7 +163,7 @@ async def get_keys_values_cmd( page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.get_keys_values( @@ -186,7 +187,7 @@ async def get_root_cmd( async def subscribe_cmd( rpc_port: Optional[int], store_id: bytes32, - urls: List[str], + urls: list[str], fingerprint: Optional[int], ) -> None: async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): @@ -208,7 +209,7 @@ async def unsubscribe_cmd( async def remove_subscriptions_cmd( rpc_port: Optional[int], store_id: bytes32, - urls: List[str], + urls: list[str], fingerprint: Optional[int], ) -> None: async with get_client(rpc_port=rpc_port, fingerprint=fingerprint) as (client, _): @@ -225,7 
+226,7 @@ async def get_kv_diff_cmd( page: Optional[int], max_page_size: Optional[int], root_path: Optional[Path] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: res = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): res = await client.get_kv_diff( @@ -248,7 +249,7 @@ async def get_root_history_cmd( async def add_missing_files_cmd( rpc_port: Optional[int], - ids: Optional[List[bytes32]], + ids: Optional[list[bytes32]], overwrite: bool, foldername: Optional[Path], fingerprint: Optional[int], @@ -265,7 +266,7 @@ async def add_missing_files_cmd( async def add_mirror_cmd( rpc_port: Optional[int], store_id: bytes32, - urls: List[str], + urls: list[str], amount: int, fee: Optional[uint64], fingerprint: Optional[int], @@ -343,7 +344,7 @@ async def clear_pending_roots( rpc_port: Optional[int], root_path: Optional[Path] = None, fingerprint: Optional[int] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): result = await client.clear_pending_roots(store_id=store_id) print(json.dumps(result, indent=2, sort_keys=True)) @@ -353,11 +354,11 @@ async def clear_pending_roots( async def get_proof_cmd( store_id: bytes32, - key_strings: List[str], + key_strings: list[str], rpc_port: Optional[int], root_path: Optional[Path] = None, fingerprint: Optional[int] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: result = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): result = await client.get_proof(store_id=store_id, keys=[hexstr_to_bytes(key) for key in key_strings]) @@ -367,11 +368,11 @@ async def get_proof_cmd( async def verify_proof_cmd( - proof: Dict[str, Any], + proof: dict[str, Any], rpc_port: Optional[int], root_path: Optional[Path] = None, fingerprint: Optional[int] = None, -) -> Dict[str, Any]: +) -> dict[str, Any]: result = dict() async with get_client(rpc_port=rpc_port, fingerprint=fingerprint, root_path=root_path) as (client, _): result = await client.verify_proof(proof=proof) diff --git a/chia/cmds/db_backup_func.py b/chia/cmds/db_backup_func.py index d63c12bece80..2beef7b54496 100644 --- a/chia/cmds/db_backup_func.py +++ b/chia/cmds/db_backup_func.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.util.config import load_config from chia.util.path import path_from_root @@ -13,7 +13,7 @@ def db_backup_func( *, no_indexes: bool, ) -> None: - config: Dict[str, Any] = load_config(root_path, "config.yaml")["full_node"] + config: dict[str, Any] = load_config(root_path, "config.yaml")["full_node"] selected_network: str = config["selected_network"] db_pattern: str = config["database_path"] db_path_replaced: str = db_pattern.replace("CHALLENGE", selected_network) diff --git a/chia/cmds/db_upgrade_func.py b/chia/cmds/db_upgrade_func.py index 0bdc1871b820..2721dbf64e43 100644 --- a/chia/cmds/db_upgrade_func.py +++ b/chia/cmds/db_upgrade_func.py @@ -10,7 +10,7 @@ from contextlib import closing from pathlib import Path from time import monotonic -from typing import Any, Dict, List, Optional +from typing import Any, Optional import zstd @@ -33,7 +33,7 @@ def db_upgrade_func( ) -> None: update_config: bool = in_db_path is None and out_db_path is None and not no_update_config - config: Dict[str, Any] + config: dict[str, Any] selected_network: str db_pattern: str if in_db_path 
is None or out_db_path is None: @@ -183,7 +183,7 @@ def convert_v1_to_v2(in_path: Path, out_path: Path) -> None: parameter_limit = get_host_parameter_limit() start_time = monotonic() block_start_time = start_time - rowids: List[int] = [] + rowids: list[int] = [] small_batch_size = BATCH_SIZE <= parameter_limit small_chain = peak_height <= parameter_limit current_header_hash = peak_hash diff --git a/chia/cmds/db_validate_func.py b/chia/cmds/db_validate_func.py index 530d23addf07..2358095b4c73 100644 --- a/chia/cmds/db_validate_func.py +++ b/chia/cmds/db_validate_func.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.consensus.block_record import BlockRecord from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -18,7 +18,7 @@ def db_validate_func( validate_blocks: bool, ) -> None: if in_db_path is None: - config: Dict[str, Any] = load_config(root_path, "config.yaml")["full_node"] + config: dict[str, Any] = load_config(root_path, "config.yaml")["full_node"] selected_network: str = config["selected_network"] db_pattern: str = config["database_path"] db_path_replaced: str = db_pattern.replace("CHALLENGE", selected_network) diff --git a/chia/cmds/farm_funcs.py b/chia/cmds/farm_funcs.py index 9a4b4f51b6b7..e1958b0a4b5e 100644 --- a/chia/cmds/farm_funcs.py +++ b/chia/cmds/farm_funcs.py @@ -3,7 +3,7 @@ import sys import traceback from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.cmds.cmds_util import format_bytes, format_minutes, get_any_service_client from chia.cmds.units import units @@ -20,14 +20,14 @@ async def get_harvesters_summary( farmer_rpc_port: Optional[int], root_path: Path = DEFAULT_ROOT_PATH -) -> Optional[Dict[str, Any]]: +) -> Optional[dict[str, Any]]: async with get_any_service_client(FarmerRpcClient, farmer_rpc_port, root_path) as (farmer_client, _): return await farmer_client.get_harvesters_summary() async def get_blockchain_state( rpc_port: Optional[int], root_path: Path = DEFAULT_ROOT_PATH -) -> Optional[Dict[str, Any]]: +) -> Optional[dict[str, Any]]: async with get_any_service_client(FullNodeRpcClient, rpc_port, root_path) as (client, _): return await client.get_blockchain_state() @@ -55,12 +55,12 @@ async def get_average_block_time(rpc_port: Optional[int], root_path: Path = DEFA async def get_wallets_stats( wallet_rpc_port: Optional[int], root_path: Path = DEFAULT_ROOT_PATH -) -> Optional[Dict[str, Any]]: +) -> Optional[dict[str, Any]]: async with get_any_service_client(WalletRpcClient, wallet_rpc_port, root_path) as (wallet_client, _): return await wallet_client.get_farmed_amount() -async def get_challenges(farmer_rpc_port: Optional[int]) -> Optional[List[Dict[str, Any]]]: +async def get_challenges(farmer_rpc_port: Optional[int]) -> Optional[list[dict[str, Any]]]: async with get_any_service_client(FarmerRpcClient, farmer_rpc_port) as (farmer_client, _): return await farmer_client.get_signage_points() @@ -135,8 +135,8 @@ class PlotStats: total_plots = 0 if harvesters_summary is not None: - harvesters_local: Dict[str, Dict[str, Any]] = {} - harvesters_remote: Dict[str, Dict[str, Any]] = {} + harvesters_local: dict[str, dict[str, Any]] = {} + harvesters_remote: dict[str, dict[str, Any]] = {} for harvester in harvesters_summary["harvesters"]: ip = harvester["connection"]["host"] if is_localhost(ip): @@ -146,7 +146,7 @@ class PlotStats: harvesters_remote[ip] = {} 
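The hunks above and below are mechanical PEP 585 rewrites: on Python 3.9+ the builtin containers accept subscripts directly, so the `typing.List`/`Dict`/`Tuple` aliases can be dropped, while `Any`, `Optional`, `Union`, and `Callable` stay imported from `typing` (the `X | None` spelling would additionally require 3.10). The `from __future__ import annotations` already at the top of these modules keeps annotations lazily evaluated, so the change is cosmetic. A minimal runnable sketch of the before/after shape; `summarize` is a hypothetical name, not code from this patch:

from typing import Optional  # Optional/Any still come from typing in this patch

def summarize(config: dict[str, int]) -> Optional[list[str]]:  # was Dict[str, int] / List[str]
    lines: list[str] = [f"{key}={value}" for key, value in config.items()]
    return lines or None

print(summarize({"rpc_port": 8555}))  # ['rpc_port=8555']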
harvesters_remote[ip][harvester["connection"]["node_id"]] = harvester - def process_harvesters(harvester_peers_in: Dict[str, Dict[str, Any]]) -> None: + def process_harvesters(harvester_peers_in: dict[str, dict[str, Any]]) -> None: for harvester_peer_id, harvester_dict in harvester_peers_in.items(): syncing = harvester_dict["syncing"] if syncing is not None and syncing["initial"]: diff --git a/chia/cmds/init_funcs.py b/chia/cmds/init_funcs.py index d1b8ac383137..e6a4794e4fca 100644 --- a/chia/cmds/init_funcs.py +++ b/chia/cmds/init_funcs.py @@ -4,7 +4,7 @@ import shutil import sqlite3 from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional import yaml @@ -40,7 +40,7 @@ ) -def dict_add_new_default(updated: Dict[str, Any], default: Dict[str, Any], do_not_migrate_keys: Dict[str, Any]) -> None: +def dict_add_new_default(updated: dict[str, Any], default: dict[str, Any], do_not_migrate_keys: dict[str, Any]) -> None: for k in do_not_migrate_keys: if k in updated and do_not_migrate_keys[k] == "": updated.pop(k) @@ -168,8 +168,8 @@ def copy_files_rec(old_path: Path, new_path: Path) -> None: def migrate_from( old_root: Path, new_root: Path, - manifest: List[str], - do_not_migrate_settings: List[str], + manifest: list[str], + do_not_migrate_settings: list[str], ) -> int: """ Copy all the files in "manifest" to the new config directory. @@ -192,7 +192,7 @@ def migrate_from( with lock_and_load_config(new_root, "config.yaml") as config: config_str: str = initial_config_file("config.yaml") - default_config: Dict[str, Any] = yaml.safe_load(config_str) + default_config: dict[str, Any] = yaml.safe_load(config_str) flattened_keys = unflatten_properties({k: "" for k in do_not_migrate_settings}) dict_add_new_default(config, default_config, flattened_keys) @@ -331,7 +331,7 @@ def chia_init( if should_check_keys: check_keys(root_path) - config: Dict[str, Any] + config: dict[str, Any] db_path_replaced: str if v1_db: diff --git a/chia/cmds/installers.py b/chia/cmds/installers.py index 8ffed570146c..fd0cae1f553f 100644 --- a/chia/cmds/installers.py +++ b/chia/cmds/installers.py @@ -3,7 +3,7 @@ import json import subprocess import tempfile -from typing import Dict, List, Optional +from typing import Optional import click import packaging.version @@ -12,7 +12,7 @@ from chia.util.timing import adjusted_timeout -def check_plotter(plotter: List[str], expected_output: bytes, specify_tmp: bool = True) -> None: +def check_plotter(plotter: list[str], expected_output: bytes, specify_tmp: bool = True) -> None: with tempfile.TemporaryDirectory() as path: tmp_dir = [] if specify_tmp: @@ -88,7 +88,7 @@ def test_command(expected_chia_version_str: str, require_madmax: bool) -> None: assert plotter_version_process.stderr == "" found_start = False - plotter_versions: Dict[str, packaging.version.Version] = {} + plotter_versions: dict[str, packaging.version.Version] = {} for line in plotter_version_process.stdout.splitlines(): if line.startswith("chiapos:"): found_start = True diff --git a/chia/cmds/keys.py b/chia/cmds/keys.py index ff9a30ea064f..1a7194beed3f 100644 --- a/chia/cmds/keys.py +++ b/chia/cmds/keys.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Optional, Tuple +from typing import Optional import click from chia_rs import PrivateKey @@ -214,7 +214,7 @@ def sign_cmd( sign(message, resolved_sk, hd_path, as_bytes, json) -def parse_signature_json(json_str: str) -> Tuple[str, str, str, str]: +def parse_signature_json(json_str: str) -> tuple[str, str, 
str, str]: import json try: @@ -320,11 +320,11 @@ def derive_cmd(ctx: click.Context, fingerprint: Optional[int], filename: Optiona @click.pass_context def search_cmd( ctx: click.Context, - search_terms: Tuple[str, ...], + search_terms: tuple[str, ...], limit: int, non_observer_derivation: bool, show_progress: bool, - search_type: Tuple[str, ...], + search_type: tuple[str, ...], derive_from_hd_path: Optional[str], prefix: Optional[str], ) -> None: @@ -364,7 +364,7 @@ class ResolutionError(Exception): def _resolve_fingerprint_and_sk( filename: Optional[str], fingerprint: Optional[int], non_observer_derivation: bool -) -> Tuple[Optional[int], Optional[PrivateKey]]: +) -> tuple[Optional[int], Optional[PrivateKey]]: from .keys_funcs import resolve_derivation_master_key reolved_fp, resolved_sk = resolve_derivation_master_key(filename if filename is not None else fingerprint) diff --git a/chia/cmds/keys_funcs.py b/chia/cmds/keys_funcs.py index 4a4681bec91f..78e8712eb0f9 100644 --- a/chia/cmds/keys_funcs.py +++ b/chia/cmds/keys_funcs.py @@ -5,7 +5,7 @@ import sys from enum import Enum from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -174,8 +174,8 @@ def show_keys( msg = "Showing all public and private keys" print(msg) - def process_key_data(key_data: KeyData) -> Dict[str, Any]: - key: Dict[str, Any] = {} + def process_key_data(key_data: KeyData) -> dict[str, Any]: + key: dict[str, Any] = {} sk = key_data.private_key if key_data.secrets is not None else None if key_data.label is not None: key["label"] = key_data.label @@ -252,7 +252,7 @@ def delete(fingerprint: int) -> None: def derive_pk_and_sk_from_hd_path( master_pk: G1Element, hd_path_root: str, master_sk: Optional[PrivateKey] = None -) -> Tuple[G1Element, Optional[PrivateKey], str]: +) -> tuple[G1Element, Optional[PrivateKey], str]: """ Derive a private key from the provided HD path. Takes a master key and HD path as input, and returns the derived key and the HD path that was used to derive it. @@ -264,7 +264,7 @@ class DerivationType(Enum): NONOBSERVER = 0 OBSERVER = 1 - path: List[str] = hd_path_root.split("/") + path: list[str] = hd_path_root.split("/") if len(path) == 0 or path[0] != "m": raise ValueError("Invalid HD path. Must start with 'm'") @@ -273,7 +273,7 @@ class DerivationType(Enum): if len(path) > 0 and path[-1] == "": # remove trailing slash path = path[:-1] - index_and_derivation_types: List[Tuple[int, DerivationType]] = [] + index_and_derivation_types: list[tuple[int, DerivationType]] = [] # Validate path for current_index_str in path: @@ -361,9 +361,9 @@ def _clear_line_part(n: int) -> None: def _search_derived( current_pk: G1Element, current_sk: Optional[PrivateKey], - search_terms: Tuple[str, ...], + search_terms: tuple[str, ...], path: str, - path_indices: Optional[List[int]], + path_indices: Optional[list[int]], limit: int, non_observer_derivation: bool, show_progress: bool, @@ -371,7 +371,7 @@ def _search_derived( search_private_key: bool, search_address: bool, prefix: str, -) -> List[str]: # Return a subset of search_terms that were found +) -> list[str]: # Return a subset of search_terms that were found """ Performs a shallow search of keys derived from the current pk/sk for items matching the provided search terms. 
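A second pattern running through this patch moves the abstract container types (`Sequence`, `AsyncIterator`, `Coroutine`, `Awaitable`) from `typing` to `collections.abc`, where they have been subscriptable since Python 3.9; the `typing` re-exports are deprecated. A minimal sketch in the style of the `get_client` context manager in the data_funcs.py diff above, assuming 3.9+; `numbers` is a made-up example, not code from this patch:

import asyncio
import contextlib
from collections.abc import AsyncIterator, Sequence

@contextlib.asynccontextmanager
async def numbers(values: Sequence[int]) -> AsyncIterator[tuple[int, ...]]:
    # async context managers are annotated as AsyncIterator of the yielded type
    yield tuple(values)

async def main() -> None:
    async with numbers([1, 2, 3]) as result:
        print(result)  # (1, 2, 3)

asyncio.run(main())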
@@ -384,15 +384,15 @@ class DerivedSearchResultType(Enum): PRIVATE_KEY = "private key" WALLET_ADDRESS = "wallet address" - remaining_search_terms: Dict[str, None] = dict.fromkeys(search_terms) + remaining_search_terms: dict[str, None] = dict.fromkeys(search_terms) current_path: str = path - current_path_indices: List[int] = path_indices if path_indices is not None else [] - found_search_terms: List[str] = [] + current_path_indices: list[int] = path_indices if path_indices is not None else [] + found_search_terms: list[str] = [] assert not (non_observer_derivation and current_sk is None) for index in range(limit): - found_items: List[Tuple[str, str, DerivedSearchResultType]] = [] + found_items: list[tuple[str, str, DerivedSearchResultType]] = [] printed_match: bool = False current_index_str = str(index) + ("n" if non_observer_derivation else "") current_path += f"{current_index_str}" @@ -480,11 +480,11 @@ class DerivedSearchResultType(Enum): def search_derive( root_path: Path, fingerprint: Optional[int], - search_terms: Tuple[str, ...], + search_terms: tuple[str, ...], limit: int, non_observer_derivation: bool, show_progress: bool, - search_types: Tuple[str, ...], + search_types: tuple[str, ...], derive_from_hd_path: Optional[str], prefix: Optional[str], private_key: Optional[PrivateKey], @@ -497,13 +497,13 @@ def search_derive( from time import perf_counter start_time = perf_counter() - remaining_search_terms: Dict[str, None] = dict.fromkeys(search_terms) # poor man's ordered set + remaining_search_terms: dict[str, None] = dict.fromkeys(search_terms) # poor man's ordered set search_address = "address" in search_types search_public_key = "public_key" in search_types search_private_key = "private_key" in search_types if prefix is None: - config: Dict[str, Any] = load_config(root_path, "config.yaml") + config: dict[str, Any] = load_config(root_path, "config.yaml") selected: str = config["selected_network"] prefix = config["network_overrides"]["config"][selected]["address_prefix"] @@ -513,8 +513,8 @@ def search_derive( search_private_key = True if fingerprint is None and private_key is None: - public_keys: List[G1Element] = Keychain().get_all_public_keys() - private_keys: List[Optional[PrivateKey]] = [ + public_keys: list[G1Element] = Keychain().get_all_public_keys() + private_keys: list[Optional[PrivateKey]] = [ data.private_key if data.secrets is not None else None for data in Keychain().get_keys(include_secrets=True) ] elif fingerprint is None: @@ -530,7 +530,7 @@ def search_derive( if sk is None and non_observer_derivation: continue current_path: str = "" - found_terms: List[str] = [] + found_terms: list[str] = [] if show_progress: print(f"Searching keys derived from: {pk.get_fingerprint()}") @@ -571,7 +571,7 @@ def search_derive( current_path = hd_path_root # Otherwise derive from well-known derivation paths else: - current_path_indices: List[int] = [12381, 8444] + current_path_indices: list[int] = [12381, 8444] path_root: str = "m/" for i in [12381, 8444]: path_root += f"{i}{'n' if non_observer_derivation else ''}/" @@ -668,10 +668,10 @@ def derive_wallet_address( pk = sk.get_g1() if prefix is None: - config: Dict[str, Any] = load_config(root_path, "config.yaml") + config: dict[str, Any] = load_config(root_path, "config.yaml") selected: str = config["selected_network"] prefix = config["network_overrides"]["config"][selected]["address_prefix"] - path_indices: List[int] = [12381, 8444, 2] + path_indices: list[int] = [12381, 8444, 2] wallet_hd_path_root: str = "m/" for i in path_indices: 
wallet_hd_path_root += f"{i}{'n' if non_observer_derivation else ''}/" @@ -728,7 +728,7 @@ def derive_child_key( # Key type was specified if key_type is not None: - path_indices: List[int] = [12381, 8444] + path_indices: list[int] = [12381, 8444] path_indices.append( { "farmer": 0, @@ -802,7 +802,7 @@ def private_key_for_fingerprint(fingerprint: int) -> Optional[PrivateKey]: def prompt_for_fingerprint() -> Optional[int]: - fingerprints: List[int] = [pk.get_fingerprint() for pk in Keychain().get_all_public_keys()] + fingerprints: list[int] = [pk.get_fingerprint() for pk in Keychain().get_all_public_keys()] while True: print("Choose key:") for i, fp in enumerate(fingerprints): @@ -826,7 +826,7 @@ def prompt_for_fingerprint() -> Optional[int]: def get_private_key_with_fingerprint_or_prompt( fingerprint: Optional[int], -) -> Tuple[Optional[int], Optional[PrivateKey]]: +) -> tuple[Optional[int], Optional[PrivateKey]]: """ Get a private key with the specified fingerprint. If fingerprint is not specified, prompt the user to select a key. @@ -852,7 +852,7 @@ def private_key_from_mnemonic_seed_file(filename: Path) -> PrivateKey: def resolve_derivation_master_key( fingerprint_or_filename: Optional[Union[int, str, Path]] -) -> Tuple[Optional[int], Optional[PrivateKey]]: +) -> tuple[Optional[int], Optional[PrivateKey]]: """ Given a key fingerprint of file containing a mnemonic seed, return the private key. """ diff --git a/chia/cmds/passphrase_funcs.py b/chia/cmds/passphrase_funcs.py index 4e24804d4fd5..aaec65e84722 100644 --- a/chia/cmds/passphrase_funcs.py +++ b/chia/cmds/passphrase_funcs.py @@ -6,7 +6,7 @@ from getpass import getpass from io import TextIOWrapper from pathlib import Path -from typing import Any, Dict, Optional, Tuple +from typing import Any, Optional import colorama @@ -73,7 +73,7 @@ def obtain_current_passphrase(prompt: str = DEFAULT_PASSPHRASE_PROMPT, use_passp def verify_passphrase_meets_requirements( new_passphrase: str, confirmation_passphrase: str -) -> Tuple[bool, Optional[str]]: +) -> tuple[bool, Optional[str]]: match = new_passphrase == confirmation_passphrase min_length = Keychain.minimum_passphrase_length() meets_len_requirement = len(new_passphrase) >= min_length @@ -131,7 +131,7 @@ def prompt_to_save_passphrase() -> bool: return save -def prompt_for_new_passphrase() -> Tuple[str, bool]: +def prompt_for_new_passphrase() -> tuple[str, bool]: min_length: int = Keychain.minimum_passphrase_length() if min_length > 0: n = min_length @@ -324,7 +324,7 @@ def remove_passphrase_hint() -> None: print("Passphrase hint was not removed") -async def async_update_daemon_passphrase_cache_if_running(root_path: Path, config: Dict[str, Any]) -> None: +async def async_update_daemon_passphrase_cache_if_running(root_path: Path, config: dict[str, Any]) -> None: """ Attempt to connect to the daemon and update the cached passphrase """ diff --git a/chia/cmds/peer_funcs.py b/chia/cmds/peer_funcs.py index b678a661e037..4f95dbb4bcfb 100644 --- a/chia/cmds/peer_funcs.py +++ b/chia/cmds/peer_funcs.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.cmds.cmds_util import NODE_TYPES, get_any_service_client from chia.rpc.rpc_client import RpcClient @@ -49,7 +49,7 @@ async def remove_node_connection(rpc_client: RpcClient, remove_connection: str) print(result_txt) -async def print_connections(rpc_client: RpcClient, trusted_peers: Dict[str, Any], trusted_cidrs: List[str]) -> None: +async def 
print_connections(rpc_client: RpcClient, trusted_peers: dict[str, Any], trusted_cidrs: list[str]) -> None: import time from chia.server.outbound_message import NodeType @@ -119,8 +119,8 @@ async def peer_async( async with get_any_service_client(client_type, rpc_port, root_path) as (rpc_client, config): # Check or edit node connections if show_connections: - trusted_peers: Dict[str, Any] = config[node_type].get("trusted_peers", {}) - trusted_cidrs: List[str] = config[node_type].get("trusted_cidrs", []) + trusted_peers: dict[str, Any] = config[node_type].get("trusted_peers", {}) + trusted_cidrs: list[str] = config[node_type].get("trusted_cidrs", []) await print_connections(rpc_client, trusted_peers, trusted_cidrs) # if called together with state, leave a blank line if add_connection: diff --git a/chia/cmds/plotnft_funcs.py b/chia/cmds/plotnft_funcs.py index 2de6b9f18051..8671925b9c66 100644 --- a/chia/cmds/plotnft_funcs.py +++ b/chia/cmds/plotnft_funcs.py @@ -4,9 +4,10 @@ import functools import json import time +from collections.abc import Awaitable from dataclasses import replace from pprint import pprint -from typing import Any, Awaitable, Callable, Dict, List, Optional +from typing import Any, Callable, Optional import aiohttp @@ -38,12 +39,12 @@ from chia.wallet.util.wallet_types import WalletType -async def create_pool_args(pool_url: str) -> Dict[str, Any]: +async def create_pool_args(pool_url: str) -> dict[str, Any]: try: async with aiohttp.ClientSession() as session: async with session.get(f"{pool_url}/pool_info", ssl=ssl_context_for_root(get_mozilla_ca_crt())) as response: if response.ok: - json_dict: Dict[str, Any] = json.loads(await response.text()) + json_dict: dict[str, Any] = json.loads(await response.text()) else: raise ValueError(f"Response from {pool_url} not OK: {response.status}") except Exception as e: @@ -117,7 +118,7 @@ async def pprint_pool_wallet_state( wallet_id: int, pool_wallet_info: PoolWalletInfo, address_prefix: str, - pool_state_dict: Optional[Dict[str, Any]], + pool_state_dict: Optional[dict[str, Any]], ) -> None: print(f"Wallet ID: {wallet_id}") if pool_wallet_info.current.state == PoolSingletonState.LEAVING_POOL.value and pool_wallet_info.target is None: @@ -142,7 +143,7 @@ async def pprint_pool_wallet_state( print(f"Target state: {PoolSingletonState(pool_wallet_info.target.state).name}") print(f"Target pool URL: {pool_wallet_info.target.pool_url}") if pool_wallet_info.current.state == PoolSingletonState.SELF_POOLING.value: - balances: Dict[str, Any] = await wallet_client.get_wallet_balance(wallet_id) + balances: dict[str, Any] = await wallet_client.get_wallet_balance(wallet_id) balance = balances["confirmed_wallet_balance"] typ = WalletType(int(WalletType.POOLING_WALLET)) address_prefix, scale = wallet_coin_unit(typ, address_prefix) @@ -177,9 +178,9 @@ async def pprint_pool_wallet_state( async def pprint_all_pool_wallet_state( wallet_client: WalletRpcClient, - get_wallets_response: List[Dict[str, Any]], + get_wallets_response: list[dict[str, Any]], address_prefix: str, - pool_state_dict: Dict[bytes32, Dict[str, Any]], + pool_state_dict: dict[bytes32, dict[str, Any]], ) -> None: print(f"Wallet height: {(await wallet_client.get_height_info()).height}") print(f"Sync status: {'Synced' if (await wallet_client.get_sync_status()).synced else 'Not synced'}") @@ -205,7 +206,7 @@ async def show(wallet_rpc_port: Optional[int], fp: Optional[int], wallet_id_pass address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] 
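The plotnft_funcs.py hunks combine both patterns: `Awaitable` moves to `collections.abc` while `Callable` remains imported from `typing`. A small runnable sketch of the resulting callback annotation, in the shape used by `submit_tx_with_confirmation` below; `fake_rpc` and `run_with_confirmation` are hypothetical names, not code from this patch:

import asyncio
from collections.abc import Awaitable
from typing import Any, Callable

async def fake_rpc() -> dict[str, Any]:
    # stands in for a wallet RPC call returning a JSON-style dict
    return {"success": True}

def run_with_confirmation(func: Callable[[], Awaitable[dict[str, Any]]]) -> None:
    print(asyncio.run(func()))

run_with_confirmation(fake_rpc)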
summaries_response = await wallet_client.get_wallets() pool_state_list = (await farmer_client.get_pool_state())["pool_state"] - pool_state_dict: Dict[bytes32, Dict[str, Any]] = { + pool_state_dict: dict[bytes32, dict[str, Any]] = { bytes32.from_hexstr(pool_state_item["pool_config"]["launcher_id"]): pool_state_item for pool_state_item in pool_state_list } @@ -246,7 +247,7 @@ async def get_login_link(launcher_id: bytes32) -> None: async def submit_tx_with_confirmation( message: str, prompt: bool, - func: Callable[[], Awaitable[Dict[str, Any]]], + func: Callable[[], Awaitable[dict[str, Any]]], wallet_client: WalletRpcClient, fingerprint: int, wallet_id: int, @@ -355,11 +356,11 @@ async def claim_cmd(*, wallet_rpc_port: Optional[int], fingerprint: int, fee: ui async def change_payout_instructions(launcher_id: str, address: CliAddress) -> None: - new_pool_configs: List[PoolWalletConfig] = [] + new_pool_configs: list[PoolWalletConfig] = [] id_found = False puzzle_hash = address.validate_address_type_get_ph(AddressType.XCH) - old_configs: List[PoolWalletConfig] = load_pool_config(DEFAULT_ROOT_PATH) + old_configs: list[PoolWalletConfig] = load_pool_config(DEFAULT_ROOT_PATH) for pool_config in old_configs: if pool_config.launcher_id == hexstr_to_bytes(launcher_id): id_found = True diff --git a/chia/cmds/rpc.py b/chia/cmds/rpc.py index 0d4612c4396b..f3d408adbf25 100644 --- a/chia/cmds/rpc.py +++ b/chia/cmds/rpc.py @@ -3,7 +3,7 @@ import asyncio import json import sys -from typing import Any, Dict, List, Optional, TextIO +from typing import Any, Optional, TextIO import click from aiohttp import ClientResponseError @@ -12,12 +12,12 @@ from chia.util.default_root import DEFAULT_ROOT_PATH from chia.util.ints import uint16 -services: List[str] = ["crawler", "daemon", "farmer", "full_node", "harvester", "timelord", "wallet", "data_layer"] +services: list[str] = ["crawler", "daemon", "farmer", "full_node", "harvester", "timelord", "wallet", "data_layer"] async def call_endpoint( - service: str, endpoint: str, request: Dict[str, Any], config: Dict[str, Any], quiet: bool = False -) -> Dict[str, Any]: + service: str, endpoint: str, request: dict[str, Any], config: dict[str, Any], quiet: bool = False +) -> dict[str, Any]: if service == "daemon": return await call_daemon_command(endpoint, request, config, quiet) @@ -25,8 +25,8 @@ async def call_endpoint( async def call_rpc_service_endpoint( - service: str, endpoint: str, request: Dict[str, Any], config: Dict[str, Any] -) -> Dict[str, Any]: + service: str, endpoint: str, request: dict[str, Any], config: dict[str, Any] +) -> dict[str, Any]: from chia.rpc.rpc_client import RpcClient port: uint16 @@ -40,7 +40,7 @@ async def call_rpc_service_endpoint( client = await RpcClient.create(config["self_hostname"], port, DEFAULT_ROOT_PATH, config) except Exception as e: raise Exception(f"Failed to create RPC client {service}: {e}") - result: Dict[str, Any] + result: dict[str, Any] try: result = await client.fetch(endpoint, request) except ClientResponseError as e: @@ -56,8 +56,8 @@ async def call_rpc_service_endpoint( async def call_daemon_command( - command: str, request: Dict[str, Any], config: Dict[str, Any], quiet: bool = False -) -> Dict[str, Any]: + command: str, request: dict[str, Any], config: dict[str, Any], quiet: bool = False +) -> dict[str, Any]: from chia.daemon.client import connect_to_daemon_and_validate daemon = await connect_to_daemon_and_validate(DEFAULT_ROOT_PATH, config, quiet=quiet) @@ -65,7 +65,7 @@ async def call_daemon_command( if daemon is None: 
raise Exception("Failed to connect to chia daemon") - result: Dict[str, Any] + result: dict[str, Any] try: ws_request = daemon.format_request(command, request) ws_response = await daemon._get(ws_request) @@ -77,11 +77,11 @@ async def call_daemon_command( return result -def print_result(json_dict: Dict[str, Any]) -> None: +def print_result(json_dict: dict[str, Any]) -> None: print(json.dumps(json_dict, indent=2, sort_keys=True)) -def get_routes(service: str, config: Dict[str, Any], quiet: bool = False) -> Dict[str, Any]: +def get_routes(service: str, config: dict[str, Any], quiet: bool = False) -> dict[str, Any]: return asyncio.run(call_endpoint(service, "get_routes", {}, config, quiet)) @@ -165,7 +165,7 @@ def rpc_client_cmd( "Can only use one request source: REQUEST argument OR -j/--json-file option. See the help with -h" ) - request_json: Dict[str, Any] = {} + request_json: dict[str, Any] = {} if json_file is not None: try: request_json = json.load(json_file) diff --git a/chia/cmds/show_funcs.py b/chia/cmds/show_funcs.py index 45a71f3249d4..89a1f3ff8656 100644 --- a/chia/cmds/show_funcs.py +++ b/chia/cmds/show_funcs.py @@ -2,13 +2,13 @@ import json from pathlib import Path -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 -async def print_blockchain_state(node_client: FullNodeRpcClient, config: Dict[str, Any]) -> bool: +async def print_blockchain_state(node_client: FullNodeRpcClient, config: dict[str, Any]) -> bool: import time from chia.cmds.cmds_util import format_bytes @@ -78,7 +78,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: Dict[st print(f"Current VDF sub_slot_iters: {sub_slot_iters}") print("\n Height: | Hash:") - added_blocks: List[BlockRecord] = [] + added_blocks: list[BlockRecord] = [] curr = await node_client.get_block_record(peak.header_hash) while curr is not None and len(added_blocks) < num_blocks and curr.height > 0: added_blocks.append(curr) @@ -92,7 +92,7 @@ async def print_blockchain_state(node_client: FullNodeRpcClient, config: Dict[st async def print_block_from_hash( - node_client: FullNodeRpcClient, config: Dict[str, Any], block_by_header_hash: str + node_client: FullNodeRpcClient, config: dict[str, Any], block_by_header_hash: str ) -> None: import time diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py index e710cd02547c..85d90337bdc2 100644 --- a/chia/cmds/signer.py +++ b/chia/cmds/signer.py @@ -3,11 +3,12 @@ import itertools import os import time +from collections.abc import Sequence from dataclasses import replace from functools import cached_property from pathlib import Path from threading import Event, Thread -from typing import List, Sequence, Type, TypeVar +from typing import TypeVar import click from chia_rs import AugSchemeMPL, G2Element @@ -61,7 +62,7 @@ class QrCodeDisplay: show_default=True, ) - def _display_qr(self, index: int, max_index: int, code_list: List[QRCode], stop_event: Event) -> None: + def _display_qr(self, index: int, max_index: int, code_list: list[QRCode], stop_event: Event) -> None: while not stop_event.is_set(): for qr_code in itertools.cycle(code_list): _clear_screen() @@ -74,7 +75,7 @@ def _display_qr(self, index: int, max_index: int, code_list: List[QRCode], stop_ if stop_event.is_set(): return - def display_qr_codes(self, blobs: List[bytes]) -> None: + def display_qr_codes(self, blobs: list[bytes]) -> None: chunk_sizes = 
[optimal_chunk_size_for_max_chunk_size(len(blob), self.qr_density) for blob in blobs] chunks = [create_chunks_for_blob(blob, chunk_size) for blob, chunk_size in zip(blobs, chunk_sizes)] qr_codes = [[make_qr(chunk) for chunk in chks] for chks in chunks] @@ -118,7 +119,7 @@ class TransactionsOut: required=True, ) - def handle_transaction_output(self, output: List[TransactionRecord]) -> None: + def handle_transaction_output(self, output: list[TransactionRecord]) -> None: with open(Path(self.transaction_file_out), "wb") as file: file.write(bytes(TransactionBundle(output))) @@ -148,8 +149,8 @@ class SPIn(_SPTranslation): required=True, ) - def read_sp_input(self, typ: Type[_T_ClvmStreamable]) -> List[_T_ClvmStreamable]: - final_list: List[_T_ClvmStreamable] = [] + def read_sp_input(self, typ: type[_T_ClvmStreamable]) -> list[_T_ClvmStreamable]: + final_list: list[_T_ClvmStreamable] = [] for filename in self.signer_protocol_input: # pylint: disable=not-an-iterable with open(Path(filename), "rb") as file: final_list.append( @@ -182,7 +183,7 @@ class SPOut(QrCodeDisplay, _SPTranslation): help="The file(s) to output to (if --output-format=file)", ) - def handle_clvm_output(self, outputs: List[Streamable]) -> None: + def handle_clvm_output(self, outputs: list[Streamable]) -> None: translation_layer = ALL_TRANSLATION_LAYERS[self.translation] if self.translation != "none" else None if self.output_format == "hex": for output in outputs: @@ -219,7 +220,7 @@ class GatherSigningInfoCMD: async def run(self) -> None: async with self.rpc_info.wallet_rpc() as wallet_rpc: - spends: List[Spend] = [ + spends: list[Spend] = [ Spend.from_coin_spend(cs) for tx in self.txs_in.transaction_bundle.txs if tx.spend_bundle is not None @@ -240,19 +241,19 @@ class ApplySignaturesCMD: async def run(self) -> None: async with self.rpc_info.wallet_rpc() as wallet_rpc: - signing_responses: List[SigningResponse] = self.sp_in.read_sp_input(SigningResponse) - spends: List[Spend] = [ + signing_responses: list[SigningResponse] = self.sp_in.read_sp_input(SigningResponse) + spends: list[Spend] = [ Spend.from_coin_spend(cs) for tx in self.txs_in.transaction_bundle.txs if tx.spend_bundle is not None for cs in tx.spend_bundle.coin_spends ] - signed_transactions: List[SignedTransaction] = ( + signed_transactions: list[SignedTransaction] = ( await wallet_rpc.client.apply_signatures( ApplySignatures(spends=spends, signing_responses=signing_responses) ) ).signed_transactions - signed_spends: List[Spend] = [spend for tx in signed_transactions for spend in tx.transaction_info.spends] + signed_spends: list[Spend] = [spend for tx in signed_transactions for spend in tx.transaction_info.spends] final_signature: G2Element = G2Element() for signature in [sig for tx in signed_transactions for sig in tx.signatures]: if signature.type != "bls_12381_aug_scheme": # pragma: no cover @@ -260,7 +261,7 @@ async def run(self) -> None: return final_signature = AugSchemeMPL.aggregate([final_signature, G2Element.from_bytes(signature.signature)]) new_spend_bundle = WalletSpendBundle([spend.as_coin_spend() for spend in signed_spends], final_signature) - new_transactions: List[TransactionRecord] = [ + new_transactions: list[TransactionRecord] = [ replace( self.txs_in.transaction_bundle.txs[0], spend_bundle=new_spend_bundle, name=new_spend_bundle.name() ), @@ -277,7 +278,7 @@ class ExecuteSigningInstructionsCMD: async def run(self) -> None: async with self.rpc_info.wallet_rpc() as wallet_rpc: - signing_instructions: List[SigningInstructions] = 
self.sp_in.read_sp_input(SigningInstructions) + signing_instructions: list[SigningInstructions] = self.sp_in.read_sp_input(SigningInstructions) self.sp_out.handle_clvm_output( [ signing_response diff --git a/chia/cmds/sim_funcs.py b/chia/cmds/sim_funcs.py index bf0cde513d2c..91567f65698b 100644 --- a/chia/cmds/sim_funcs.py +++ b/chia/cmds/sim_funcs.py @@ -5,7 +5,7 @@ import sys from pathlib import Path, PureWindowsPath from random import randint -from typing import Any, Dict, List, Optional +from typing import Any, Optional from aiohttp import ClientConnectorError from chia_rs import PrivateKey @@ -47,7 +47,7 @@ def create_chia_directory( plot_directory: Optional[str], auto_farm: Optional[bool], docker_mode: bool, -) -> Dict[str, Any]: +) -> dict[str, Any]: """ This function creates a new chia directory and returns a heavily modified config, suitable for use in the simulator. @@ -57,7 +57,7 @@ def create_chia_directory( if not chia_root.is_dir() or not Path(chia_root / "config" / "config.yaml").exists(): # create chia directories & load config chia_init(chia_root, testnet=True, fix_ssl_permissions=True) - config: Dict[str, Any] = load_config(chia_root, "config.yaml") + config: dict[str, Any] = load_config(chia_root, "config.yaml") # apply standard block-tools config. config["full_node"]["send_uncompact_interval"] = 0 config["full_node"]["target_uncompact_proofs"] = 30 @@ -243,7 +243,7 @@ def select_fingerprint( return fingerprint -async def generate_plots(config: Dict[str, Any], root_path: Path, fingerprint: int, bitfield: bool) -> None: +async def generate_plots(config: dict[str, Any], root_path: Path, fingerprint: int, bitfield: bool) -> None: """ Pre-Generate plots for the new simulator instance. """ @@ -345,14 +345,14 @@ def print_coin_record( async def print_coin_records( - config: Dict[str, Any], + config: dict[str, Any], node_client: SimulatorFullNodeRpcClient, include_reward_coins: bool, include_spent: bool = False, ) -> None: import sys - coin_records: List[CoinRecord] = await node_client.get_all_coins(include_spent) + coin_records: list[CoinRecord] = await node_client.get_all_coins(include_spent) coin_records = [coin_record for coin_record in coin_records if not coin_record.coinbase or include_reward_coins] address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] name = "mojo" @@ -382,7 +382,7 @@ async def print_coin_records( break -async def print_wallets(config: Dict[str, Any], node_client: SimulatorFullNodeRpcClient) -> None: +async def print_wallets(config: dict[str, Any], node_client: SimulatorFullNodeRpcClient) -> None: ph_and_amount = await node_client.get_all_puzzle_hashes() address_prefix = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] name = "mojo" diff --git a/chia/cmds/start_funcs.py b/chia/cmds/start_funcs.py index 93afac9087a4..6340abb0704e 100644 --- a/chia/cmds/start_funcs.py +++ b/chia/cmds/start_funcs.py @@ -7,7 +7,7 @@ import sys from concurrent.futures import ThreadPoolExecutor from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.cmds.passphrase_funcs import get_current_passphrase from chia.daemon.client import DaemonProxy, connect_to_daemon_and_validate @@ -42,7 +42,7 @@ def launch_start_daemon(root_path: Path) -> subprocess.Popen: async def create_start_daemon_connection( - root_path: Path, config: Dict[str, Any], *, skip_keyring: bool + root_path: Path, config: dict[str, Any], *, skip_keyring: bool ) -> 
Optional[DaemonProxy]: connection = await connect_to_daemon_and_validate(root_path, config) if connection is None: @@ -75,7 +75,7 @@ async def create_start_daemon_connection( async def async_start( - root_path: Path, config: Dict[str, Any], group: tuple[str, ...], restart: bool, *, skip_keyring: bool + root_path: Path, config: dict[str, Any], group: tuple[str, ...], restart: bool, *, skip_keyring: bool ) -> None: try: daemon = await create_start_daemon_connection(root_path, config, skip_keyring=skip_keyring) diff --git a/chia/cmds/stop.py b/chia/cmds/stop.py index 9404d5d03598..a0b3621cbcc3 100644 --- a/chia/cmds/stop.py +++ b/chia/cmds/stop.py @@ -3,7 +3,7 @@ import asyncio import sys from pathlib import Path -from typing import Any, Dict +from typing import Any import click @@ -11,7 +11,7 @@ from chia.util.service_groups import all_groups, services_for_groups -async def async_stop(root_path: Path, config: Dict[str, Any], group: tuple[str, ...], stop_daemon: bool) -> int: +async def async_stop(root_path: Path, config: dict[str, Any], group: tuple[str, ...], stop_daemon: bool) -> int: from chia.daemon.client import connect_to_daemon_and_validate daemon = await connect_to_daemon_and_validate(root_path, config) diff --git a/chia/cmds/units.py b/chia/cmds/units.py index d352d4813bc7..362dcd92a018 100644 --- a/chia/cmds/units.py +++ b/chia/cmds/units.py @@ -1,10 +1,8 @@ from __future__ import annotations -from typing import Dict - # The rest of the codebase uses mojos everywhere. # Only use these units for user facing interfaces. -units: Dict[str, int] = { +units: dict[str, int] = { "chia": 10**12, # 1 chia (XCH) is 1,000,000,000,000 mojo (1 trillion) "mojo": 1, "cat": 10**3, # 1 CAT is 1000 CAT mojos diff --git a/chia/cmds/wallet.py b/chia/cmds/wallet.py index e4ee3e217159..497a3592f569 100644 --- a/chia/cmds/wallet.py +++ b/chia/cmds/wallet.py @@ -2,7 +2,8 @@ import asyncio import pathlib -from typing import List, Optional, Sequence +from collections.abc import Sequence +from typing import Optional import click @@ -216,7 +217,7 @@ def send_cmd( clawback_time: int, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import send return asyncio.run( @@ -328,7 +329,7 @@ def clawback( force: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import spend_clawback return asyncio.run( @@ -598,7 +599,7 @@ def take_offer_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import take_offer return asyncio.run( @@ -635,7 +636,7 @@ def cancel_offer_cmd( fee: uint64, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import cancel_offer return asyncio.run( @@ -698,7 +699,7 @@ def did_create_wallet_cmd( fee: uint64, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import create_did_wallet return asyncio.run( @@ -815,7 +816,7 @@ def did_update_metadata_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import update_did_metadata return asyncio.run( @@ -912,11 +913,11 @@ def did_message_spend_cmd( coin_announcements: Optional[str], push: bool, 
condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import did_message_spend - puzzle_list: List[str] = [] - coin_list: List[str] = [] + puzzle_list: list[str] = [] + coin_list: list[str] = [] if puzzle_announcements is not None: try: puzzle_list = puzzle_announcements.split(",") @@ -982,7 +983,7 @@ def did_transfer_did( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import transfer_did return asyncio.run( @@ -1107,7 +1108,7 @@ def nft_mint_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import mint_nft if metadata_uris is None: @@ -1180,7 +1181,7 @@ def nft_add_uri_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import add_uri_to_nft return asyncio.run( @@ -1231,7 +1232,7 @@ def nft_transfer_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import transfer_nft return asyncio.run( @@ -1298,7 +1299,7 @@ def nft_set_did_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import set_nft_did return asyncio.run( @@ -1379,7 +1380,7 @@ def send_notification_cmd( fee: uint64, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import send_notification message_bytes: bytes = bytes(message, "utf8") @@ -1475,7 +1476,7 @@ def mint_vc_cmd( fee: uint64, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import mint_vc return asyncio.run( @@ -1559,7 +1560,7 @@ def spend_vc_cmd( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import spend_vc return asyncio.run( @@ -1659,7 +1660,7 @@ def revoke_vc_cmd( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import revoke_vc return asyncio.run( @@ -1726,7 +1727,7 @@ def approve_r_cats_cmd( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: from .wallet_funcs import approve_r_cats return asyncio.run( diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index 3215a3f1c301..e1c8da215f79 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -6,9 +6,10 @@ import pathlib import sys import time +from collections.abc import Awaitable, Sequence from datetime import datetime, timezone from decimal import Decimal -from typing import Any, Awaitable, Callable, Dict, List, Optional, Sequence, Tuple, Union +from typing import Any, Callable, Optional, Union from chia.cmds.cmds_util import ( CMDTXConfigLoader, @@ -44,7 +45,7 @@ from chia.wallet.vc_wallet.vc_store import VCProofs from chia.wallet.wallet_coin_store import GetCoinRecords -CATNameResolver = Callable[[bytes32], Awaitable[Optional[Tuple[Optional[uint32], str]]]] +CATNameResolver = Callable[[bytes32], 
Awaitable[Optional[tuple[Optional[uint32], str]]]] transaction_type_descriptions = { TransactionType.INCOMING_TX: "received", @@ -69,7 +70,7 @@ def print_transaction( name: str, address_prefix: str, mojo_per_unit: int, - coin_record: Optional[Dict[str, Any]] = None, + coin_record: Optional[dict[str, Any]] = None, ) -> None: if verbose: print(tx) @@ -124,7 +125,7 @@ async def get_wallet_type(wallet_id: int, wallet_client: WalletRpcClient) -> Wal async def get_unit_name_for_wallet_id( - config: Dict[str, Any], + config: dict[str, Any], wallet_type: WalletType, wallet_id: int, wallet_client: WalletRpcClient, @@ -197,7 +198,7 @@ async def get_transactions( [TransactionType.INCOMING_CLAWBACK_RECEIVE, TransactionType.INCOMING_CLAWBACK_SEND] ) ) - txs: List[TransactionRecord] = await wallet_client.get_transactions( + txs: list[TransactionRecord] = await wallet_client.get_transactions( wallet_id, start=offset, end=(offset + limit), sort_key=sort_key, reverse=reverse, type_filter=type_filter ) @@ -224,7 +225,7 @@ async def get_transactions( for j in range(0, num_per_screen): if i + j + skipped >= len(txs): break - coin_record: Optional[Dict[str, Any]] = None + coin_record: Optional[dict[str, Any]] = None if txs[i + j + skipped].type in CLAWBACK_INCOMING_TRANSACTION_TYPES: coin_records = await wallet_client.get_coin_records( GetCoinRecords(coin_id_filter=HashFilter.include([txs[i + j + skipped].additions[0].name()])) @@ -275,7 +276,7 @@ async def send( clawback_time_lock: int, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if memo is None: memos = None @@ -393,7 +394,7 @@ async def update_derivation_index(wallet_rpc_port: Optional[int], fp: Optional[i async def add_token(wallet_rpc_port: Optional[int], fp: Optional[int], asset_id: bytes32, token_name: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, _): - existing_info: Optional[Tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name(asset_id) + existing_info: Optional[tuple[Optional[uint32], str]] = await wallet_client.cat_asset_id_to_name(asset_id) if existing_info is None or existing_info[0] is None: response = await wallet_client.create_wallet_for_existing_cat(asset_id) wallet_id = response["wallet_id"] @@ -420,11 +421,11 @@ async def make_offer( if offers == [] or requests == []: print("Not creating offer: Must be offering and requesting at least one asset") else: - offer_dict: Dict[Union[uint32, str], int] = {} - driver_dict: Dict[str, Any] = {} - printable_dict: Dict[str, Tuple[str, int, int]] = {} # Dict[asset_name, Tuple[amount, unit, multiplier]] - royalty_asset_dict: Dict[Any, Tuple[Any, uint16]] = {} - fungible_asset_dict: Dict[Any, uint64] = {} + offer_dict: dict[Union[uint32, str], int] = {} + driver_dict: dict[str, Any] = {} + printable_dict: dict[str, tuple[str, int, int]] = {} # Dict[asset_name, Tuple[amount, unit, multiplier]] + royalty_asset_dict: dict[Any, tuple[Any, uint16]] = {} + fungible_asset_dict: dict[Any, uint64] = {} for item in [*offers, *requests]: name, amount = tuple(item.split(":")[0:2]) try: @@ -516,10 +517,10 @@ async def make_offer( print(f"Including Fees: {Decimal(fee) / units['chia']} XCH, {fee} mojos") if royalty_asset_dict != {}: - royalty_summary: Dict[Any, List[Dict[str, Any]]] = await wallet_client.nft_calculate_royalties( + royalty_summary: dict[Any, list[dict[str, Any]]] = await 
wallet_client.nft_calculate_royalties( royalty_asset_dict, fungible_asset_dict ) - total_amounts_requested: Dict[Any, int] = {} + total_amounts_requested: dict[Any, int] = {} print() print("Royalties Summary:") for nft_id, summaries in royalty_summary.items(): @@ -574,7 +575,7 @@ def timestamp_to_time(timestamp: int) -> str: async def print_offer_summary( - cat_name_resolver: CATNameResolver, sum_dict: Dict[str, int], has_fee: bool = False, network_xch: str = "XCH" + cat_name_resolver: CATNameResolver, sum_dict: dict[str, int], has_fee: bool = False, network_xch: str = "XCH" ) -> None: for asset_id, amount in sum_dict.items(): description: str = "" @@ -625,7 +626,7 @@ async def print_trade_record(record: TradeRecord, wallet_client: WalletRpcClient print("Summary:") offer = Offer.from_bytes(record.offer) offered, requested, _, _ = offer.summary() - outbound_balances: Dict[str, int] = offer.get_pending_amounts() + outbound_balances: dict[str, int] = offer.get_pending_amounts() fees: Decimal = Decimal(offer.fees()) cat_name_resolver = wallet_client.cat_asset_id_to_name print(" OFFERED:") @@ -661,7 +662,7 @@ async def get_offers( ) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): file_contents: bool = (filepath is not None) or summaries - records: List[TradeRecord] = [] + records: list[TradeRecord] = [] if offer_id is None: batch_size: int = 10 start: int = 0 @@ -669,7 +670,7 @@ async def get_offers( # Traverse offers page by page while True: - new_records: List[TradeRecord] = await wallet_client.get_all_offers( + new_records: list[TradeRecord] = await wallet_client.get_all_offers( start, end, reverse=reverse, @@ -705,7 +706,7 @@ async def take_offer( examine_only: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if os.path.exists(file): filepath = pathlib.Path(file) @@ -732,7 +733,7 @@ async def take_offer( print() - royalty_asset_dict: Dict[Any, Tuple[Any, uint16]] = {} + royalty_asset_dict: dict[Any, tuple[Any, uint16]] = {} for royalty_asset_id in nft_coin_ids_supporting_royalties_from_offer(offer): if royalty_asset_id.hex() in offered: percentage, address = await get_nft_royalty_percentage_and_address(royalty_asset_id, wallet_client) @@ -742,7 +743,7 @@ async def take_offer( ) if royalty_asset_dict != {}: - fungible_asset_dict: Dict[Any, uint64] = {} + fungible_asset_dict: dict[Any, uint64] = {} for fungible_asset_id in fungible_assets_from_offer(offer): fungible_asset_id_str = fungible_asset_id.hex() if fungible_asset_id is not None else "xch" if fungible_asset_id_str in requested: @@ -756,10 +757,10 @@ async def take_offer( fungible_asset_dict[nft_royalty_currency] = uint64(requested[fungible_asset_id_str]) if fungible_asset_dict != {}: - royalty_summary: Dict[Any, List[Dict[str, Any]]] = await wallet_client.nft_calculate_royalties( + royalty_summary: dict[Any, list[dict[str, Any]]] = await wallet_client.nft_calculate_royalties( royalty_asset_dict, fungible_asset_dict ) - total_amounts_requested: Dict[Any, int] = {} + total_amounts_requested: dict[Any, int] = {} print("Royalties Summary:") for nft_id, summaries in royalty_summary.items(): print(f" - For {nft_id}:") @@ -810,7 +811,7 @@ async def cancel_offer( secure: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with 
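# Illustrative sketch, not part of the patch: the royalty summaries above are typed
# dict[Any, list[dict[str, Any]]], one list of payment summaries per NFT id, and the
# totalling loop accumulates per-asset amounts. In miniature (the "asset"/"amount"
# keys and the literal values are assumptions for illustration, not the RPC schema):

royalty_summary = {
    "nft1aaa": [{"asset": "xch", "amount": 50}],
    "nft1bbb": [{"asset": "xch", "amount": 25}],
}
total_amounts_requested: dict[str, int] = {}
for nft_id, summaries in royalty_summary.items():
    for summary in summaries:
        asset = summary["asset"]
        total_amounts_requested[asset] = total_amounts_requested.get(asset, 0) + summary["amount"]
assert total_amounts_requested == {"xch": 75}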
get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): trade_record = await wallet_client.get_offer(offer_id, file_contents=True) await print_trade_record(trade_record, wallet_client, summaries=True) @@ -832,7 +833,7 @@ async def cancel_offer( return res.transactions -def wallet_coin_unit(typ: WalletType, address_prefix: str) -> Tuple[str, int]: +def wallet_coin_unit(typ: WalletType, address_prefix: str) -> tuple[str, int]: if typ in {WalletType.CAT, WalletType.CRCAT}: return "", units["cat"] if typ in [WalletType.STANDARD_WALLET, WalletType.POOLING_WALLET, WalletType.MULTI_SIG]: @@ -925,7 +926,7 @@ async def print_balances( print(" ") trusted_peers: dict[str, str] = config["wallet"].get("trusted_peers", {}) - trusted_cidrs: List[str] = config["wallet"].get("trusted_cidrs", []) + trusted_cidrs: list[str] = config["wallet"].get("trusted_cidrs", []) await print_connections(wallet_client, trusted_peers, trusted_cidrs) @@ -937,7 +938,7 @@ async def create_did_wallet( amount: int, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.create_new_did_wallet( @@ -1008,7 +1009,7 @@ async def update_did_metadata( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.update_did_metadata( @@ -1035,11 +1036,11 @@ async def did_message_spend( wallet_rpc_port: Optional[int], fp: Optional[int], did_wallet_id: int, - puzzle_announcements: List[str], - coin_announcements: List[str], + puzzle_announcements: list[str], + coin_announcements: list[str], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.did_message_spend( @@ -1069,7 +1070,7 @@ async def transfer_did( reuse_puzhash: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: @@ -1141,11 +1142,11 @@ async def mint_nft( target_cli_address: Optional[CliAddress], no_did_ownership: bool, hash: str, - uris: List[str], + uris: list[str], metadata_hash: Optional[str], - metadata_uris: List[str], + metadata_uris: list[str], license_hash: Optional[str], - license_uris: List[str], + license_uris: list[str], edition_total: Optional[int], edition_number: Optional[int], fee: uint64, @@ -1153,7 +1154,7 @@ async def mint_nft( reuse_puzhash: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): royalty_address = royalty_cli_address.validate_address_type(AddressType.XCH) if royalty_cli_address else None target_address = target_cli_address.validate_address_type(AddressType.XCH) if target_cli_address else None @@ -1215,7 +1216,7 @@ async def add_uri_to_nft( reuse_puzhash: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with 
get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: if len([x for x in (uri, metadata_uri, license_uri) if x is not None]) > 1: @@ -1263,7 +1264,7 @@ async def transfer_nft( reuse_puzhash: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: target_address = target_cli_address.validate_address_type(AddressType.XCH) @@ -1288,7 +1289,7 @@ async def transfer_nft( return [] -def print_nft_info(nft: NFTInfo, *, config: Dict[str, Any]) -> None: +def print_nft_info(nft: NFTInfo, *, config: dict[str, Any]) -> None: indent: str = " " owner_did = None if nft.owner_did is None else encode_puzzle_hash(nft.owner_did, AddressType.DID.hrp(config)) minter_did = None if nft.minter_did is None else encode_puzzle_hash(nft.minter_did, AddressType.DID.hrp(config)) @@ -1353,7 +1354,7 @@ async def set_nft_did( reuse_puzhash: Optional[bool], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): try: response = await wallet_client.set_nft_did( @@ -1387,7 +1388,7 @@ async def get_nft_info(wallet_rpc_port: Optional[int], fp: Optional[int], nft_co async def get_nft_royalty_percentage_and_address( nft_coin_id: bytes32, wallet_client: WalletRpcClient -) -> Tuple[uint16, bytes32]: +) -> tuple[uint16, bytes32]: info = NFTInfo.from_json_dict((await wallet_client.get_nft_info(nft_coin_id.hex()))["nft_info"]) assert info.royalty_puzzle_hash is not None percentage = uint16(info.royalty_percentage) if info.royalty_percentage is not None else 0 @@ -1395,11 +1396,11 @@ async def get_nft_royalty_percentage_and_address( def calculate_nft_royalty_amount( - offered: Dict[str, Any], requested: Dict[str, Any], nft_coin_id: bytes32, nft_royalty_percentage: int -) -> Tuple[str, int, int]: + offered: dict[str, Any], requested: dict[str, Any], nft_coin_id: bytes32, nft_royalty_percentage: int +) -> tuple[str, int, int]: nft_asset_id = nft_coin_id.hex() - amount_dict: Dict[str, Any] = requested if nft_asset_id in offered else offered - amounts: List[Tuple[str, int]] = list(amount_dict.items()) + amount_dict: dict[str, Any] = requested if nft_asset_id in offered else offered + amounts: list[tuple[str, int]] = list(amount_dict.items()) if len(amounts) != 1 or not isinstance(amounts[0][1], int): raise ValueError("Royalty enabled NFTs only support offering/requesting one NFT for one currency") @@ -1410,7 +1411,7 @@ def calculate_nft_royalty_amount( return royalty_asset_id, royalty_amount, total_amount_requested -def driver_dict_asset_is_nft_supporting_royalties(driver_dict: Dict[bytes32, PuzzleInfo], asset_id: bytes32) -> bool: +def driver_dict_asset_is_nft_supporting_royalties(driver_dict: dict[bytes32, PuzzleInfo], asset_id: bytes32) -> bool: asset_dict: PuzzleInfo = driver_dict[asset_id] return asset_dict.check_type( [ @@ -1421,7 +1422,7 @@ def driver_dict_asset_is_nft_supporting_royalties(driver_dict: Dict[bytes32, Puz ) -def driver_dict_asset_is_fungible(driver_dict: Dict[bytes32, PuzzleInfo], asset_id: bytes32) -> bool: +def driver_dict_asset_is_fungible(driver_dict: dict[bytes32, PuzzleInfo], asset_id: bytes32) -> bool: asset_dict: PuzzleInfo = driver_dict[asset_id] return not asset_dict.check_type( [ @@ -1430,13 +1431,13 @@ def driver_dict_asset_is_fungible(driver_dict: 
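# Illustrative sketch, not part of the patch: calculate_nft_royalty_amount above
# returns (royalty asset id, royalty amount, total amount requested). Chia NFT
# royalty percentages are carried as uint16 basis points (10000 = 100%), so the
# amount is conventionally an integer floor division; assuming that convention:

def royalty_amount(total_amount: int, royalty_basis_points: int) -> int:
    # e.g. 300 basis points == 3%
    return total_amount * royalty_basis_points // 10000

assert royalty_amount(1_000_000, 300) == 30_000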
Dict[bytes32, PuzzleInfo], asset_ ) -def nft_coin_ids_supporting_royalties_from_offer(offer: Offer) -> List[bytes32]: +def nft_coin_ids_supporting_royalties_from_offer(offer: Offer) -> list[bytes32]: return [ key for key in offer.driver_dict.keys() if driver_dict_asset_is_nft_supporting_royalties(offer.driver_dict, key) ] -def fungible_assets_from_offer(offer: Offer) -> List[Optional[bytes32]]: +def fungible_assets_from_offer(offer: Offer) -> list[Optional[bytes32]]: return [ asset for asset in offer.arbitrage() if asset is None or driver_dict_asset_is_fungible(offer.driver_dict, asset) ] @@ -1451,7 +1452,7 @@ async def send_notification( cli_amount: CliAmount, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): amount: uint64 = cli_amount.convert_amount(units["chia"]) @@ -1547,7 +1548,7 @@ async def spend_clawback( force: bool = False, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, _, _): tx_ids = [] for tid in tx_ids_str.split(","): @@ -1577,7 +1578,7 @@ async def mint_vc( target_address: Optional[CliAddress], push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): res = await wallet_client.vc_mint( did.validate_address_type_get_ph(AddressType.DID), @@ -1637,7 +1638,7 @@ async def spend_vc( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): txs = ( await wallet_client.vc_spend( @@ -1676,7 +1677,7 @@ async def add_proof_reveal( print("Must specify at least one proof") return - proof_dict: Dict[str, str] = {proof: "1" for proof in proofs} + proof_dict: dict[str, str] = {proof: "1" for proof in proofs} if root_only: print(f"Proof Hash: {VCProofs(proof_dict).root()}") return @@ -1688,7 +1689,7 @@ async def add_proof_reveal( async def get_proofs_for_root(wallet_rpc_port: Optional[int], fp: Optional[int], proof_hash: str) -> None: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): - proof_dict: Dict[str, str] = await wallet_client.vc_get_proofs_for_root(bytes32.from_hexstr(proof_hash)) + proof_dict: dict[str, str] = await wallet_client.vc_get_proofs_for_root(bytes32.from_hexstr(proof_hash)) print("Proofs:") for proof in proof_dict: print(f" - {proof}") @@ -1703,7 +1704,7 @@ async def revoke_vc( reuse_puzhash: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fp) as (wallet_client, fingerprint, config): if parent_coin_id is None: if vc_id is None: @@ -1754,7 +1755,7 @@ async def approve_r_cats( reuse: bool, push: bool, condition_valid_times: ConditionValidTimes, -) -> List[TransactionRecord]: +) -> list[TransactionRecord]: async with get_wallet_client(wallet_rpc_port, fingerprint) as (wallet_client, fingerprint, config): if wallet_client is None: return diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 749b6763b850..c630e815282c 100644 --- 
a/chia/consensus/block_body_validation.py
+++ b/chia/consensus/block_body_validation.py
@@ -2,8 +2,9 @@
 import collections
 import logging
+from collections.abc import Awaitable, Collection
 from dataclasses import dataclass, field
-from typing import Awaitable, Callable, Collection, Dict, List, Optional, Set, Tuple, Union
+from typing import Callable, Optional, Union
 from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions
 from chiabip158 import PyBIP158
@@ -68,13 +69,13 @@ class ForkInfo:
     # the header hash of the peak block of this fork
     peak_hash: bytes32
     # The additions include coinbase additions
-    additions_since_fork: Dict[bytes32, ForkAdd] = field(default_factory=dict)
+    additions_since_fork: dict[bytes32, ForkAdd] = field(default_factory=dict)
     # coin-id, ForkRem
-    removals_since_fork: Dict[bytes32, ForkRem] = field(default_factory=dict)
+    removals_since_fork: dict[bytes32, ForkRem] = field(default_factory=dict)
     # the header hashes of the blocks, starting with the one-past fork_height
     # i.e. the header hash of fork_height + 1 is stored in block_hashes[0]
     # followed by fork_height + 2, and so on.
-    block_hashes: List[bytes32] = field(default_factory=list)
+    block_hashes: list[bytes32] = field(default_factory=list)
     def reset(self, fork_height: int, header_hash: bytes32) -> None:
         self.fork_height = fork_height
@@ -121,7 +122,7 @@ def rollback(self, header_hash: bytes32, height: int) -> None:
 async def validate_block_body(
     constants: ConsensusConstants,
     records: BlockRecordsProtocol,
-    get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]],
+    get_coin_records: Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]],
     block: Union[FullBlock, UnfinishedBlock],
     height: uint32,
     conds: Optional[SpendBundleConditions],
@@ -129,7 +130,7 @@ async def validate_block_body(
     bls_cache: Optional[BLSCache],
     *,
     validate_signature: bool = True,
-) -> Tuple[Optional[Err], Optional[SpendBundleConditions]]:
+) -> tuple[Optional[Err], Optional[SpendBundleConditions]]:
     """
     This assumes the header block has been completely validated.
     Validates the transactions and body of the block.
@@ -180,7 +181,7 @@ async def validate_block_body(
     assert block.foliage_transaction_block is not None
     # keeps track of the reward coins that need to be incorporated
-    expected_reward_coins: Set[Coin] = set()
+    expected_reward_coins: set[Coin] = set()
     # 3.
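# Illustrative sketch, not part of the patch: ForkInfo above tracks per-fork state
# keyed by coin id, and block_hashes is offset one past fork_height, i.e.
# block_hashes[0] belongs to height fork_height + 1. A minimal model of that
# indexing (plain int/str stand in for uint32/bytes32):

from dataclasses import dataclass, field

@dataclass
class MiniForkInfo:
    fork_height: int
    peak_height: int
    block_hashes: list[str] = field(default_factory=list)

    def hash_at(self, height: int) -> str:
        # height fork_height + 1 lives at index 0
        return self.block_hashes[height - (self.fork_height + 1)]

info = MiniForkInfo(fork_height=10, peak_height=12, block_hashes=["h11", "h12"])
assert info.hash_at(12) == "h12"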
The transaction info hash in the Foliage block must match the transaction info if block.foliage_transaction_block.transactions_info_hash != std_hash(block.transactions_info): @@ -246,14 +247,14 @@ async def validate_block_body( if len(block.transactions_info.reward_claims_incorporated) != len(expected_reward_coins): return Err.INVALID_REWARD_COINS, None - removals: List[bytes32] = [] + removals: list[bytes32] = [] # we store coins paired with their names in order to avoid computing the # coin name multiple times, we store it next to the coin while validating # the block - coinbase_additions: List[Tuple[Coin, bytes32]] = [(c, c.name()) for c in expected_reward_coins] - additions: List[Tuple[Coin, bytes32]] = [] - removals_puzzle_dic: Dict[bytes32, bytes32] = {} + coinbase_additions: list[tuple[Coin, bytes32]] = [(c, c.name()) for c in expected_reward_coins] + additions: list[tuple[Coin, bytes32]] = [] + removals_puzzle_dic: dict[bytes32, bytes32] = {} cost: uint64 = uint64(0) # In header validation we check that timestamp is not more than 5 minutes into the future @@ -319,7 +320,7 @@ async def validate_block_body( if block.transactions_info.cost != cost: return Err.INVALID_BLOCK_COST, None - additions_dic: Dict[bytes32, Coin] = {} + additions_dic: dict[bytes32, Coin] = {} # 10. Check additions for max coin amount # Be careful to check for 64 bit overflows in other languages. This is the max 64 bit unsigned integer # We will not even reach here because Coins do type checking (uint64) @@ -342,7 +343,7 @@ async def validate_block_body( return root_error, None # 12. The additions and removals must result in the correct filter - byte_array_tx: List[bytearray] = [] + byte_array_tx: list[bytearray] = [] for coin, _ in additions + coinbase_additions: byte_array_tx.append(bytearray(coin.puzzle_hash)) @@ -374,10 +375,10 @@ async def validate_block_body( assert fork_info.fork_height < height assert fork_info.peak_height == height - 1 - removal_coin_records: Dict[bytes32, CoinRecord] = {} + removal_coin_records: dict[bytes32, CoinRecord] = {} # the removed coins we need to look up from the DB # i.e. all non-ephemeral coins - removals_from_db: List[bytes32] = [] + removals_from_db: list[bytes32] = [] for rem in removals: if rem in additions_dic: # Ephemeral coin @@ -403,7 +404,7 @@ async def validate_block_body( # some coin spends we need to ensure exist in the fork branch. Both coins we # can't find in the DB, but also coins that were spent after the fork point - look_in_fork: List[bytes32] = [] + look_in_fork: list[bytes32] = [] for unspent in unspent_records: if unspent.confirmed_block_index <= fork_info.fork_height: # Spending something in the current chain, confirmed before fork @@ -418,7 +419,7 @@ async def validate_block_body( if len(unspent_records) != len(removals_from_db): # some coins could not be found in the DB. 
We need to find out which # ones and look for them in additions_since_fork - found: Set[bytes32] = {u.name for u in unspent_records} + found: set[bytes32] = {u.name for u in unspent_records} for rem in removals_from_db: if rem in found: continue @@ -489,8 +490,8 @@ async def validate_block_body( return error, None # create hash_key list for aggsig check - pairs_pks: List[G1Element] = [] - pairs_msgs: List[bytes] = [] + pairs_pks: list[G1Element] = [] + pairs_msgs: list[bytes] = [] if conds is not None: pairs_pks, pairs_msgs = pkm_pairs(conds, constants.AGG_SIG_ME_ADDITIONAL_DATA) diff --git a/chia/consensus/block_creation.py b/chia/consensus/block_creation.py index dddc617a00e5..66b28e0f8b7e 100644 --- a/chia/consensus/block_creation.py +++ b/chia/consensus/block_creation.py @@ -2,7 +2,8 @@ import logging import random -from typing import Callable, Dict, List, Optional, Sequence, Tuple +from collections.abc import Sequence +from typing import Callable, Optional import chia_rs from chia_rs import G1Element, G2Element, compute_merkle_set_root @@ -56,8 +57,8 @@ def create_foliage( reward_block_unfinished: RewardChainBlockUnfinished, block_generator: Optional[BlockGenerator], aggregate_sig: G2Element, - additions: List[Coin], - removals: List[Coin], + additions: list[Coin], + removals: list[Coin], prev_block: Optional[BlockRecord], blocks: BlockRecordsProtocol, total_iters_sp: uint128, @@ -69,7 +70,7 @@ def create_foliage( seed: bytes, compute_cost: Callable[[BlockGenerator, ConsensusConstants, uint32], uint64], compute_fees: Callable[[Sequence[Coin], Sequence[Coin]], uint64], -) -> Tuple[Foliage, Optional[FoliageTransactionBlock], Optional[TransactionsInfo]]: +) -> tuple[Foliage, Optional[FoliageTransactionBlock], Optional[TransactionsInfo]]: """ Creates a foliage for a given reward chain block. This may or may not be a tx block. In the case of a tx block, the return values are not None. 
This is called at the signage point, so some of this information may be @@ -111,9 +112,9 @@ def create_foliage( height = uint32(prev_block.height + 1) # Create filter - byte_array_tx: List[bytearray] = [] - tx_additions: List[Coin] = [] - tx_removals: List[bytes32] = [] + byte_array_tx: list[bytearray] = [] + tx_additions: list[Coin] = [] + tx_removals: list[bytes32] = [] pool_target_signature: Optional[G2Element] = get_pool_signature( pool_target, reward_block_unfinished.proof_of_space.pool_public_key @@ -202,10 +203,10 @@ def create_foliage( bip158: PyBIP158 = PyBIP158(byte_array_tx) encoded = bytes(bip158.GetEncoded()) - additions_merkle_items: List[bytes32] = [] + additions_merkle_items: list[bytes32] = [] # Create addition Merkle set - puzzlehash_coin_map: Dict[bytes32, List[bytes32]] = {} + puzzlehash_coin_map: dict[bytes32, list[bytes32]] = {} for coin in tx_additions: if coin.puzzle_hash in puzzlehash_coin_map: @@ -295,10 +296,10 @@ def create_unfinished_block( seed: bytes = b"", block_generator: Optional[BlockGenerator] = None, aggregate_sig: G2Element = G2Element(), - additions: Optional[List[Coin]] = None, - removals: Optional[List[Coin]] = None, + additions: Optional[list[Coin]] = None, + removals: Optional[list[Coin]] = None, prev_block: Optional[BlockRecord] = None, - finished_sub_slots_input: Optional[List[EndOfSubSlotBundle]] = None, + finished_sub_slots_input: Optional[list[EndOfSubSlotBundle]] = None, compute_cost: Callable[[BlockGenerator, ConsensusConstants, uint32], uint64] = compute_block_cost, compute_fees: Callable[[Sequence[Coin], Sequence[Coin]], uint64] = compute_block_fee, ) -> UnfinishedBlock: @@ -334,7 +335,7 @@ def create_unfinished_block( """ if finished_sub_slots_input is None: - finished_sub_slots: List[EndOfSubSlotBundle] = [] + finished_sub_slots: list[EndOfSubSlotBundle] = [] else: finished_sub_slots = finished_sub_slots_input.copy() overflow: bool = sp_iters > ip_iters @@ -428,7 +429,7 @@ def unfinished_block_to_full_block( rc_ip_proof: VDFProof, icc_ip_vdf: Optional[VDFInfo], icc_ip_proof: Optional[VDFProof], - finished_sub_slots: List[EndOfSubSlotBundle], + finished_sub_slots: list[EndOfSubSlotBundle], prev_block: Optional[BlockRecord], blocks: BlockRecordsProtocol, total_iters_sp: uint128, diff --git a/chia/consensus/block_header_validation.py b/chia/consensus/block_header_validation.py index 7b1ca77b04d4..a450e2f4e989 100644 --- a/chia/consensus/block_header_validation.py +++ b/chia/consensus/block_header_validation.py @@ -2,7 +2,7 @@ import logging import time -from typing import Optional, Tuple +from typing import Optional from chia_rs import AugSchemeMPL @@ -48,7 +48,7 @@ def validate_unfinished_header_block( skip_vdf_is_valid: bool = False, check_sub_epoch_summary: bool = True, prev_ses_block: Optional[BlockRecord] = None, -) -> Tuple[Optional[uint64], Optional[ValidationError]]: +) -> tuple[Optional[uint64], Optional[ValidationError]]: """ Validates an unfinished header block. This is a block without the infusion VDFs (unfinished) and without transactions and transaction info (header). Returns (required_iters, error). @@ -838,7 +838,7 @@ def validate_finished_header_block( expected_sub_slot_iters: uint64, check_sub_epoch_summary: bool = True, prev_ses_block: Optional[BlockRecord] = None, -) -> Tuple[Optional[uint64], Optional[ValidationError]]: +) -> tuple[Optional[uint64], Optional[ValidationError]]: """ Fully validates the header of a block. A header block is the same as a full block, but without transactions and transaction info. 
Returns (required_iters, error). diff --git a/chia/consensus/block_root_validation.py b/chia/consensus/block_root_validation.py index a4815419f63d..215cf6e2e7a6 100644 --- a/chia/consensus/block_root_validation.py +++ b/chia/consensus/block_root_validation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, List, Optional, Tuple +from typing import Optional from chia_rs import compute_merkle_set_root @@ -12,8 +12,8 @@ def validate_block_merkle_roots( block_additions_root: bytes32, block_removals_root: bytes32, - tx_additions: Optional[List[Tuple[Coin, bytes32]]] = None, - tx_removals: Optional[List[bytes32]] = None, + tx_additions: Optional[list[tuple[Coin, bytes32]]] = None, + tx_removals: Optional[list[bytes32]] = None, ) -> Optional[Err]: if tx_removals is None: tx_removals = [] @@ -21,7 +21,7 @@ def validate_block_merkle_roots( tx_additions = [] # Create addition Merkle set - puzzlehash_coins_map: Dict[bytes32, List[bytes32]] = {} + puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {} for coin, coin_name in tx_additions: if coin.puzzle_hash in puzzlehash_coins_map: @@ -30,7 +30,7 @@ def validate_block_merkle_roots( puzzlehash_coins_map[coin.puzzle_hash] = [coin_name] # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash - additions_merkle_items: List[bytes32] = [] + additions_merkle_items: list[bytes32] = [] for puzzle, coin_ids in puzzlehash_coins_map.items(): additions_merkle_items.append(puzzle) additions_merkle_items.append(hash_coin_ids(coin_ids)) diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index f423960feca1..b927a2013e4f 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -9,7 +9,7 @@ from concurrent.futures import Executor, ThreadPoolExecutor from enum import Enum from pathlib import Path -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast from chia_rs import BLSCache @@ -68,12 +68,12 @@ class AddBlockResult(Enum): class StateChangeSummary: peak: BlockRecord fork_height: uint32 - rolled_back_records: List[CoinRecord] + rolled_back_records: list[CoinRecord] # list of coin-id, puzzle-hash pairs - removals: List[Tuple[bytes32, bytes32]] + removals: list[tuple[bytes32, bytes32]] # new coin and hint - additions: List[Tuple[Coin, Optional[bytes]]] - new_rewards: List[Coin] + additions: list[tuple[Coin, Optional[bytes]]] + new_rewards: list[Coin] class BlockchainMutexPriority(enum.IntEnum): @@ -94,9 +94,9 @@ class Blockchain: # peak of the blockchain _peak_height: Optional[uint32] # All blocks in peak path are guaranteed to be included, can include orphan blocks - __block_records: Dict[bytes32, BlockRecord] + __block_records: dict[bytes32, BlockRecord] # all hashes of blocks in block_record by height, used for garbage collection - __heights_in_cache: Dict[uint32, Set[bytes32]] + __heights_in_cache: dict[uint32, set[bytes32]] # maps block height (of the current heaviest chain) to block hash and sub # epoch summaries __height_map: BlockHeightMap @@ -107,7 +107,7 @@ class Blockchain: # Used to verify blocks in parallel pool: Executor # Set holding seen compact proofs, in order to avoid duplicates. 
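# Illustrative sketch, not part of the patch: the Blockchain caches above pair a
# hash-keyed record store (__block_records) with a height -> {hashes} index
# (__heights_in_cache) so old entries can be evicted by height. A minimal version
# of that garbage collection (str/object stand in for bytes32/BlockRecord):

block_records: dict[str, object] = {}
heights_in_cache: dict[int, set[str]] = {}

def add_record(height: int, header_hash: str, record: object) -> None:
    block_records[header_hash] = record
    heights_in_cache.setdefault(height, set()).add(header_hash)

def clean_below(height: int) -> None:
    # drop every cached record at or below `height`
    for h in [h for h in heights_in_cache if h <= height]:
        for header_hash in heights_in_cache.pop(h):
            block_records.pop(header_hash, None)

add_record(1, "a", object())
add_record(2, "b", object())
clean_below(1)
assert "a" not in block_records and "b" in block_records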
- _seen_compact_proofs: Set[Tuple[VDFInfo, uint32]] + _seen_compact_proofs: set[tuple[VDFInfo, uint32]] # Whether blockchain is shut down or not _shut_down: bool @@ -285,7 +285,7 @@ async def add_block( sub_slot_iters: uint64, fork_info: Optional[ForkInfo] = None, prev_ses_block: Optional[BlockRecord] = None, - ) -> Tuple[AddBlockResult, Optional[Err], Optional[StateChangeSummary]]: + ) -> tuple[AddBlockResult, Optional[Err], Optional[StateChangeSummary]]: """ This method must be called under the blockchain lock Adds a new block into the blockchain, if it's valid and connected to the current @@ -515,7 +515,7 @@ async def _reconsider_peak( block_record: BlockRecord, genesis: bool, fork_info: ForkInfo, - ) -> Tuple[List[BlockRecord], Optional[StateChangeSummary]]: + ) -> tuple[list[BlockRecord], Optional[StateChangeSummary]]: """ When a new block is added, this is called, to check if the new block is the new peak of the chain. This also handles reorgs by reverting blocks which are not in the heaviest chain. @@ -524,7 +524,7 @@ async def _reconsider_peak( """ peak = self.get_peak() - rolled_back_state: Dict[bytes32, CoinRecord] = {} + rolled_back_state: dict[bytes32, CoinRecord] = {} if genesis and peak is not None: return [], None @@ -542,7 +542,7 @@ async def _reconsider_peak( rolled_back_state[coin_record.name] = coin_record # Collects all blocks from fork point to new peak - records_to_add: List[BlockRecord] = [] + records_to_add: list[BlockRecord] = [] if genesis: records_to_add = [block_record] @@ -621,7 +621,7 @@ def get_next_slot_iters(self, header_hash: bytes32, new_slot: bool) -> uint64: async def get_sp_and_ip_sub_slots( self, header_hash: bytes32 - ) -> Optional[Tuple[Optional[EndOfSubSlotBundle], Optional[EndOfSubSlotBundle]]]: + ) -> Optional[tuple[Optional[EndOfSubSlotBundle], Optional[EndOfSubSlotBundle]]]: block: Optional[FullBlock] = await self.block_store.get_full_block(header_hash) if block is None: return None @@ -672,11 +672,11 @@ async def get_sp_and_ip_sub_slots( return None, ip_sub_slot return prev_curr.finished_sub_slots[-1], ip_sub_slot - def get_recent_reward_challenges(self) -> List[Tuple[bytes32, uint128]]: + def get_recent_reward_challenges(self) -> list[tuple[bytes32, uint128]]: peak = self.get_peak() if peak is None: return [] - recent_rc: List[Tuple[bytes32, uint128]] = [] + recent_rc: list[tuple[bytes32, uint128]] = [] curr: Optional[BlockRecord] = peak while curr is not None and len(recent_rc) < 2 * self.constants.MAX_SUB_SLOT_BLOCKS: if curr != peak: @@ -695,7 +695,7 @@ def get_recent_reward_challenges(self) -> List[Tuple[bytes32, uint128]]: async def validate_unfinished_block_header( self, block: UnfinishedBlock, skip_overflow_ss_validation: bool = True - ) -> Tuple[Optional[uint64], Optional[Err]]: + ) -> tuple[Optional[uint64], Optional[Err]]: if len(block.transactions_generator_ref_list) > self.constants.MAX_GENERATOR_REF_LIST_SIZE: return None, Err.TOO_MANY_GENERATOR_REFS @@ -818,7 +818,7 @@ def height_to_block_record(self, height: uint32) -> BlockRecord: raise ValueError(f"Height is not in blockchain: {height}") return self.block_record(header_hash) - def get_ses_heights(self) -> List[uint32]: + def get_ses_heights(self) -> list[uint32]: return self.__height_map.get_ses_heights() def get_ses(self, height: uint32) -> SubEpochSummary: @@ -888,27 +888,27 @@ def clean_block_records(self) -> None: return None self.clean_block_record(self._peak_height - self.constants.BLOCKS_CACHE_SIZE) - async def get_block_records_in_range(self, start: int, stop: int) -> 
Dict[bytes32, BlockRecord]: + async def get_block_records_in_range(self, start: int, stop: int) -> dict[bytes32, BlockRecord]: return await self.block_store.get_block_records_in_range(start, stop) async def get_header_blocks_in_range( self, start: int, stop: int, tx_filter: bool = True - ) -> Dict[bytes32, HeaderBlock]: + ) -> dict[bytes32, HeaderBlock]: hashes = [] for height in range(start, stop + 1): header_hash: Optional[bytes32] = self.height_to_hash(uint32(height)) if header_hash is not None: hashes.append(header_hash) - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for hash in hashes.copy(): block = self.block_store.block_cache.get(hash) if block is not None: blocks.append(block) hashes.remove(hash) - blocks_on_disk: List[FullBlock] = await self.block_store.get_blocks_by_hash(hashes) + blocks_on_disk: list[FullBlock] = await self.block_store.get_blocks_by_hash(hashes) blocks.extend(blocks_on_disk) - header_blocks: Dict[bytes32, HeaderBlock] = {} + header_blocks: dict[bytes32, HeaderBlock] = {} for block in blocks: if self.height_to_hash(block.height) != block.header_hash: @@ -916,10 +916,10 @@ async def get_header_blocks_in_range( if tx_filter is False: header = get_block_header(block, [], []) else: - tx_additions: List[CoinRecord] = [ + tx_additions: list[CoinRecord] = [ c for c in (await self.coin_store.get_coins_added_at_height(block.height)) if not c.coinbase ] - removed: List[CoinRecord] = await self.coin_store.get_coins_removed_at_height(block.height) + removed: list[CoinRecord] = await self.coin_store.get_coins_removed_at_height(block.height) header = get_block_header( block, [record.coin for record in tx_additions], [record.coin.name() for record in removed] ) @@ -930,19 +930,19 @@ async def get_header_blocks_in_range( async def get_header_block_by_height( self, height: int, header_hash: bytes32, tx_filter: bool = True ) -> Optional[HeaderBlock]: - header_dict: Dict[bytes32, HeaderBlock] = await self.get_header_blocks_in_range(height, height, tx_filter) + header_dict: dict[bytes32, HeaderBlock] = await self.get_header_blocks_in_range(height, height, tx_filter) if len(header_dict) == 0: return None if header_hash not in header_dict: return None return header_dict[header_hash] - async def get_block_records_at(self, heights: List[uint32], batch_size: int = 900) -> List[BlockRecord]: + async def get_block_records_at(self, heights: list[uint32], batch_size: int = 900) -> list[BlockRecord]: """ gets block records by height (only blocks that are part of the chain) """ - records: List[BlockRecord] = [] - hashes: List[bytes32] = [] + records: list[BlockRecord] = [] + hashes: list[bytes32] = [] assert batch_size < self.block_store.db_wrapper.host_parameter_limit for height in heights: header_hash: Optional[bytes32] = self.height_to_hash(height) @@ -970,7 +970,7 @@ async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[Block return ret return await self.block_store.get_block_record(header_hash) - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: """ Given a list of block header hashes, returns the previous header hashes for each block, in the order they were passed in. 
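# Illustrative sketch, not part of the patch: get_header_blocks_in_range above
# serves hits from the in-memory block cache and batches all misses into a single
# database read. The cache-then-db split in miniature (fetch_from_db is a stand-in
# for block_store.get_blocks_by_hash, str stands in for bytes32/FullBlock):

def get_blocks(hashes: list[str], cache: dict[str, str]) -> list[str]:
    blocks: list[str] = []
    misses: list[str] = []
    for h in hashes:
        block = cache.get(h)
        if block is not None:
            blocks.append(block)
        else:
            misses.append(h)
    blocks.extend(fetch_from_db(misses))  # one batched read for all misses
    return blocks

def fetch_from_db(hashes: list[str]) -> list[str]:
    return [f"block-{h}" for h in hashes]

assert get_blocks(["x"], {"x": "cached-x"}) == ["cached-x"]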
@@ -1007,15 +1007,15 @@ def add_block_record(self, block_record: BlockRecord) -> None: self.__heights_in_cache[block_record.height].add(block_record.header_hash) async def persist_sub_epoch_challenge_segments( - self, ses_block_hash: bytes32, segments: List[SubEpochChallengeSegment] + self, ses_block_hash: bytes32, segments: list[SubEpochChallengeSegment] ) -> None: await self.block_store.persist_sub_epoch_challenge_segments(ses_block_hash, segments) async def get_sub_epoch_challenge_segments( self, ses_block_hash: bytes32, - ) -> Optional[List[SubEpochChallengeSegment]]: - segments: Optional[List[SubEpochChallengeSegment]] = await self.block_store.get_sub_epoch_challenge_segments( + ) -> Optional[list[SubEpochChallengeSegment]]: + segments: Optional[list[SubEpochChallengeSegment]] = await self.block_store.get_sub_epoch_challenge_segments( ses_block_hash ) if segments is None: @@ -1033,9 +1033,9 @@ def seen_compact_proofs(self, vdf_info: VDFInfo, height: uint32) -> bool: self._seen_compact_proofs.add(pot_tuple) return False - async def lookup_block_generators(self, header_hash: bytes32, generator_refs: Set[uint32]) -> Dict[uint32, bytes]: + async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: - generators: Dict[uint32, bytes] = {} + generators: dict[uint32, bytes] = {} # if this is empty, we shouldn't have called this function to begin with assert len(generator_refs) @@ -1064,7 +1064,7 @@ async def lookup_block_generators(self, header_hash: bytes32, generator_refs: Se if self.height_to_hash(peak_block.height) != header_hash: peak: Optional[BlockRecord] = self.get_peak() assert peak is not None - reorg_chain: Dict[uint32, bytes32] + reorg_chain: dict[uint32, bytes32] # Then we look up blocks up to fork point one at a time, backtracking reorg_chain, _ = await lookup_fork_chain( self, diff --git a/chia/consensus/blockchain_interface.py b/chia/consensus/blockchain_interface.py index 7ad64b800c89..66a2e9dbd222 100644 --- a/chia/consensus/blockchain_interface.py +++ b/chia/consensus/blockchain_interface.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, List, Optional, Protocol, Set +from typing import Optional, Protocol from chia.consensus.block_record import BlockRecord from chia.types.blockchain_format.sized_bytes import bytes32 @@ -21,13 +21,13 @@ def height_to_block_record(self, height: uint32) -> BlockRecord: ... # given a list of block header hashes, return the header hashes of their # previous blocks. This is not limited to the block record cache, but must # allow any block in the database to be referenced - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: ... + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: ... class BlocksProtocol(BlockRecordsProtocol, Protocol): async def lookup_block_generators( - self, header_hash: bytes32, generator_refs: Set[uint32] - ) -> Dict[uint32, bytes]: ... + self, header_hash: bytes32, generator_refs: set[uint32] + ) -> dict[uint32, bytes]: ... async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[BlockRecord]: ... def add_block_record(self, block_record: BlockRecord) -> None: ... @@ -35,22 +35,22 @@ def add_block_record(self, block_record: BlockRecord) -> None: ... class BlockchainInterface(BlocksProtocol, Protocol): def get_peak(self) -> Optional[BlockRecord]: ... def get_peak_height(self) -> Optional[uint32]: ... - def get_ses_heights(self) -> List[uint32]: ... 
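# Illustrative sketch, not part of the patch: seen_compact_proofs above
# deduplicates (VDFInfo, height) pairs with a set of tuples; the first sighting
# records the pair and returns False, repeats return True (str stands in for
# VDFInfo):

seen: set[tuple[str, int]] = set()

def seen_before(vdf_info: str, height: int) -> bool:
    key = (vdf_info, height)
    if key in seen:
        return True
    seen.add(key)
    return False

assert seen_before("proof", 5) is False
assert seen_before("proof", 5) is True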
+ def get_ses_heights(self) -> list[uint32]: ... def get_ses(self, height: uint32) -> SubEpochSummary: ... async def contains_block_from_db(self, header_hash: bytes32) -> bool: ... - async def get_block_records_in_range(self, start: int, stop: int) -> Dict[bytes32, BlockRecord]: ... + async def get_block_records_in_range(self, start: int, stop: int) -> dict[bytes32, BlockRecord]: ... async def get_header_blocks_in_range( self, start: int, stop: int, tx_filter: bool = True - ) -> Dict[bytes32, HeaderBlock]: ... + ) -> dict[bytes32, HeaderBlock]: ... - async def get_block_records_at(self, heights: List[uint32]) -> List[BlockRecord]: ... + async def get_block_records_at(self, heights: list[uint32]) -> list[BlockRecord]: ... async def persist_sub_epoch_challenge_segments( - self, sub_epoch_summary_hash: bytes32, segments: List[SubEpochChallengeSegment] + self, sub_epoch_summary_hash: bytes32, segments: list[SubEpochChallengeSegment] ) -> None: ... async def get_sub_epoch_challenge_segments( self, sub_epoch_summary_hash: bytes32, - ) -> Optional[List[SubEpochChallengeSegment]]: ... + ) -> Optional[list[SubEpochChallengeSegment]]: ... diff --git a/chia/consensus/default_constants.py b/chia/consensus/default_constants.py index a24d0aa37a0e..64d56b7a8f64 100644 --- a/chia/consensus/default_constants.py +++ b/chia/consensus/default_constants.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict +from typing import Any from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.hash import std_hash @@ -84,7 +84,7 @@ ) -def update_testnet_overrides(network_id: str, overrides: Dict[str, Any]) -> None: +def update_testnet_overrides(network_id: str, overrides: dict[str, Any]) -> None: if network_id == "testnet11": if "SOFT_FORK5_HEIGHT" not in overrides: overrides["SOFT_FORK5_HEIGHT"] = 1340000 diff --git a/chia/consensus/difficulty_adjustment.py b/chia/consensus/difficulty_adjustment.py index 5a788032075a..63a85d27fb04 100644 --- a/chia/consensus/difficulty_adjustment.py +++ b/chia/consensus/difficulty_adjustment.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -15,7 +15,7 @@ def _get_blocks_at_height( prev_b: BlockRecord, target_height: uint32, max_num_blocks: uint32 = uint32(1), -) -> List[BlockRecord]: +) -> list[BlockRecord]: """ Return a consecutive list of BlockRecords starting at target_height, returning a maximum of max_num_blocks. Assumes all block records are present. Does a slot linear search, if the blocks are not @@ -33,7 +33,7 @@ def _get_blocks_at_height( if header_hash == prev_b.header_hash: # Efficient fetching, since we are fetching ancestor blocks within the heaviest chain. We can directly # use the height_to_block_record method - block_list: List[BlockRecord] = [] + block_list: list[BlockRecord] = [] for h in range(target_height, target_height + max_num_blocks): assert blocks.contains_height(uint32(h)) block_list.append(blocks.height_to_block_record(uint32(h))) @@ -141,7 +141,7 @@ def can_finish_sub_and_full_epoch( deficit: uint8, block_at_height_included_ses: bool, prev_ses_block: Optional[BlockRecord] = None, -) -> Tuple[bool, bool]: +) -> tuple[bool, bool]: """ Returns a bool tuple first bool is true if the next sub-slot after height will form part of a new sub-epoch. 
Therefore @@ -359,7 +359,7 @@ def get_next_sub_slot_iters_and_difficulty( is_first_in_sub_slot: bool, prev_b: Optional[BlockRecord], blocks: BlockRecordsProtocol, -) -> Tuple[uint64, uint64]: +) -> tuple[uint64, uint64]: """ Retrieves the current sub_slot iters and difficulty of the next block after prev_b. diff --git a/chia/consensus/find_fork_point.py b/chia/consensus/find_fork_point.py index 89374bf0b019..f7aaad9b1088 100644 --- a/chia/consensus/find_fork_point.py +++ b/chia/consensus/find_fork_point.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Dict, Tuple, Union +from typing import Union from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -61,10 +61,10 @@ async def find_fork_point_in_chain( async def lookup_fork_chain( blocks: BlockRecordsProtocol, - block_1: Tuple[int, bytes32], - block_2: Tuple[int, bytes32], + block_1: tuple[int, bytes32], + block_2: tuple[int, bytes32], constants: ConsensusConstants, -) -> Tuple[Dict[uint32, bytes32], bytes32]: +) -> tuple[dict[uint32, bytes32], bytes32]: """ Tries to find height where new chain (block_2) diverged from block_1. The inputs are (height, header-hash)-tuples. @@ -82,7 +82,7 @@ async def lookup_fork_chain( height_2 = block_2[0] bh_2 = block_2[1] - ret: Dict[uint32, bytes32] = {} + ret: dict[uint32, bytes32] = {} while height_1 > height_2: [bh_1] = await blocks.prev_block_hash([bh_1]) diff --git a/chia/consensus/full_block_to_block_record.py b/chia/consensus/full_block_to_block_record.py index 7a47c5a34d84..b9ff5e0e5920 100644 --- a/chia/consensus/full_block_to_block_record.py +++ b/chia/consensus/full_block_to_block_record.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Union +from typing import Optional, Union from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -111,13 +111,13 @@ def header_block_to_sub_block_record( icc_output = None if len(block.finished_sub_slots) > 0: - finished_challenge_slot_hashes: Optional[List[bytes32]] = [ + finished_challenge_slot_hashes: Optional[list[bytes32]] = [ sub_slot.challenge_chain.get_hash() for sub_slot in block.finished_sub_slots ] - finished_reward_slot_hashes: Optional[List[bytes32]] = [ + finished_reward_slot_hashes: Optional[list[bytes32]] = [ sub_slot.reward_chain.get_hash() for sub_slot in block.finished_sub_slots ] - finished_infused_challenge_slot_hashes: Optional[List[bytes32]] = [ + finished_infused_challenge_slot_hashes: Optional[list[bytes32]] = [ sub_slot.infused_challenge_chain.get_hash() for sub_slot in block.finished_sub_slots if sub_slot.infused_challenge_chain is not None diff --git a/chia/consensus/get_block_challenge.py b/chia/consensus/get_block_challenge.py index f74a64e42f2e..a295474bc900 100644 --- a/chia/consensus/get_block_challenge.py +++ b/chia/consensus/get_block_challenge.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import List, Union +from typing import Union from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -89,7 +89,7 @@ def get_block_challenge( challenges_to_look_for = 2 else: challenges_to_look_for = 1 - reversed_challenge_hashes: List[bytes32] = [] + reversed_challenge_hashes: list[bytes32] = [] curr: BlockRecord = blocks.block_record(header_block.prev_header_hash) while len(reversed_challenge_hashes) < challenges_to_look_for: if 
curr.first_in_sub_slot: diff --git a/chia/consensus/get_block_generator.py b/chia/consensus/get_block_generator.py index 6295c4622912..6469fd603b5d 100644 --- a/chia/consensus/get_block_generator.py +++ b/chia/consensus/get_block_generator.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Awaitable, Callable, Dict, Optional, Set +from collections.abc import Awaitable +from typing import Callable, Optional from chia.types.block_protocol import BlockInfo from chia.types.blockchain_format.sized_bytes import bytes32 @@ -9,7 +10,7 @@ async def get_block_generator( - lookup_block_generators: Callable[[bytes32, Set[uint32]], Awaitable[Dict[uint32, bytes]]], + lookup_block_generators: Callable[[bytes32, set[uint32]], Awaitable[dict[uint32, bytes]]], block: BlockInfo, ) -> Optional[BlockGenerator]: ref_list = block.transactions_generator_ref_list @@ -20,7 +21,7 @@ async def get_block_generator( return BlockGenerator(block.transactions_generator, []) generator_refs = set(ref_list) - generators: Dict[uint32, bytes] = await lookup_block_generators(block.prev_header_hash, generator_refs) + generators: dict[uint32, bytes] = await lookup_block_generators(block.prev_header_hash, generator_refs) result = [generators[height] for height in block.transactions_generator_ref_list] return BlockGenerator(block.transactions_generator, result) diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 46b31b99edf3..5f0f7393c63d 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -5,9 +5,10 @@ import logging import time import traceback +from collections.abc import Sequence from concurrent.futures import Executor from dataclasses import dataclass -from typing import Dict, List, Optional, Sequence +from typing import Optional from chia_rs import AugSchemeMPL, SpendBundleConditions @@ -53,7 +54,7 @@ def pre_validate_block( constants: ConsensusConstants, blockchain: BlockRecordsProtocol, block: FullBlock, - prev_generators: Optional[List[bytes]], + prev_generators: Optional[list[bytes]], conds: Optional[SpendBundleConditions], vs: ValidationState, validate_signatures: bool, @@ -61,8 +62,8 @@ def pre_validate_block( try: validation_start = time.monotonic() - tx_additions: List[Coin] = [] - removals: List[bytes32] = [] + tx_additions: list[Coin] = [] + removals: list[bytes32] = [] if conds is not None: removals, tx_additions = tx_removals_and_additions(conds) elif block.transactions_generator is not None: @@ -138,12 +139,12 @@ async def pre_validate_blocks_multiprocessing( block_records: BlocksProtocol, blocks: Sequence[FullBlock], pool: Executor, - block_height_conds_map: Dict[uint32, SpendBundleConditions], + block_height_conds_map: dict[uint32, SpendBundleConditions], vs: ValidationState, *, - wp_summaries: Optional[List[SubEpochSummary]] = None, + wp_summaries: Optional[list[SubEpochSummary]] = None, validate_signatures: bool = True, -) -> List[PreValidationResult]: +) -> list[PreValidationResult]: """ This method must be called under the blockchain lock If all the full blocks pass pre-validation, (only validates header), returns the list of required iters. 
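# Illustrative sketch, not part of the patch: get_block_generator above resolves
# generator back-references by fetching the referenced heights as a set, then
# re-indexing the returned dict to restore the block's reference order (including
# duplicates). `lookup` is a stand-in for the lookup_block_generators callable:

import asyncio

async def lookup(prev_hash: bytes, refs: set[int]) -> dict[int, bytes]:
    return {h: f"gen{h}".encode() for h in refs}

async def resolve_refs(prev_hash: bytes, ref_list: list[int]) -> list[bytes]:
    generators = await lookup(prev_hash, set(ref_list))
    # indexing by height restores the block's (possibly duplicated) ref order
    return [generators[h] for h in ref_list]

assert asyncio.run(resolve_refs(b"", [3, 1, 3])) == [b"gen3", b"gen1", b"gen3"]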
@@ -221,7 +222,7 @@ async def pre_validate_blocks_multiprocessing( blockchain.add_extra_block(block, block_rec) # Temporarily add block to chain prev_b = block_rec - previous_generators: Optional[List[bytes]] = None + previous_generators: Optional[list[bytes]] = None try: block_generator: Optional[BlockGenerator] = await get_block_generator( diff --git a/chia/consensus/vdf_info_computation.py b/chia/consensus/vdf_info_computation.py index d50c724c6dff..caea9854e3c8 100644 --- a/chia/consensus/vdf_info_computation.py +++ b/chia/consensus/vdf_info_computation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -13,13 +13,13 @@ def get_signage_point_vdf_info( constants: ConsensusConstants, - finished_sub_slots: List[EndOfSubSlotBundle], + finished_sub_slots: list[EndOfSubSlotBundle], overflow: bool, prev_b: Optional[BlockRecord], blocks: BlockRecordsProtocol, sp_total_iters: uint128, sp_iters: uint64, -) -> Tuple[bytes32, bytes32, ClassgroupElement, ClassgroupElement, uint64, uint64]: +) -> tuple[bytes32, bytes32, ClassgroupElement, ClassgroupElement, uint64, uint64]: """ Returns the following information, for the VDF of the signage point at sp_total_iters. cc and rc challenge hash diff --git a/chia/daemon/client.py b/chia/daemon/client.py index 96e55714a9ac..3a4866ab2f95 100644 --- a/chia/daemon/client.py +++ b/chia/daemon/client.py @@ -3,9 +3,10 @@ import asyncio import json import ssl +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from pathlib import Path -from typing import Any, AsyncIterator, Dict, List, Optional +from typing import Any, Optional import aiohttp @@ -23,15 +24,15 @@ def __init__( max_message_size: int = 50 * 1000 * 1000, ): self._uri = uri - self._request_dict: Dict[str, asyncio.Event] = {} - self.response_dict: Dict[str, WsRpcMessage] = {} + self._request_dict: dict[str, asyncio.Event] = {} + self.response_dict: dict[str, WsRpcMessage] = {} self.ssl_context = ssl_context self.heartbeat = heartbeat self.client_session: Optional[aiohttp.ClientSession] = None self.websocket: Optional[aiohttp.ClientWebSocketResponse] = None self.max_message_size = max_message_size - def format_request(self, command: str, data: Dict[str, Any]) -> WsRpcMessage: + def format_request(self, command: str, data: dict[str, Any]) -> WsRpcMessage: request = create_payload_dict(command, data, "client", "daemon") return request @@ -92,13 +93,13 @@ async def _get(self, request: WsRpcMessage) -> WsRpcMessage: raise Exception(f"No response from daemon for request_id: {request_id}") async def get_version(self) -> WsRpcMessage: - data: Dict[str, Any] = {} + data: dict[str, Any] = {} request = self.format_request("get_version", data) response = await self._get(request) return response async def get_network_info(self) -> WsRpcMessage: - data: Dict[str, Any] = {} + data: dict[str, Any] = {} request = self.format_request("get_network_info", data) response = await self._get(request) return response @@ -124,7 +125,7 @@ async def is_running(self, service_name: str) -> bool: return False async def is_keyring_locked(self) -> bool: - data: Dict[str, Any] = {} + data: dict[str, Any] = {} request = self.format_request("is_keyring_locked", data) response = await self._get(request) if "is_keyring_locked" in response["data"]: @@ -152,7 +153,7 @@ async def exit(self) -> WsRpcMessage: request = 
self.format_request("exit", {}) return await self._get(request) - async def get_keys_for_plotting(self, fingerprints: Optional[List[uint32]] = None) -> WsRpcMessage: + async def get_keys_for_plotting(self, fingerprints: Optional[list[uint32]] = None) -> WsRpcMessage: data = {"fingerprints": fingerprints} if fingerprints else {} request = self.format_request("get_keys_for_plotting", data) response = await self._get(request) @@ -177,7 +178,7 @@ async def connect_to_daemon( async def connect_to_daemon_and_validate( - root_path: Path, config: Dict[str, Any], quiet: bool = False + root_path: Path, config: dict[str, Any], quiet: bool = False ) -> Optional[DaemonProxy]: """ Connect to the local daemon and do a ping to ensure that something is really @@ -213,7 +214,7 @@ async def connect_to_daemon_and_validate( @asynccontextmanager async def acquire_connection_to_daemon( - root_path: Path, config: Dict[str, Any], quiet: bool = False + root_path: Path, config: dict[str, Any], quiet: bool = False ) -> AsyncIterator[Optional[DaemonProxy]]: """ Asynchronous context manager which attempts to create a connection to the daemon. diff --git a/chia/daemon/keychain_proxy.py b/chia/daemon/keychain_proxy.py index 391f302062e8..2bbdbc89ded1 100644 --- a/chia/daemon/keychain_proxy.py +++ b/chia/daemon/keychain_proxy.py @@ -5,7 +5,7 @@ import ssl import traceback from pathlib import Path -from typing import Any, Dict, List, Literal, Optional, Tuple, Union, overload +from typing import Any, Literal, Optional, Union, overload from aiohttp import ClientConnectorError, ClientSession from chia_rs import AugSchemeMPL, G1Element, PrivateKey @@ -71,7 +71,7 @@ def use_local_keychain(self) -> bool: """ return self.keychain is not None - def format_request(self, command: str, data: Dict[str, Any]) -> WsRpcMessage: + def format_request(self, command: str, data: dict[str, Any]) -> WsRpcMessage: """ Overrides DaemonProxy.format_request() to add keychain-specific RPC params """ @@ -141,7 +141,7 @@ async def close(self) -> None: if self.keychain_connection_task is not None: await self.keychain_connection_task - async def get_response_for_request(self, request_name: str, data: Dict[str, Any]) -> Tuple[WsRpcMessage, bool]: + async def get_response_for_request(self, request_name: str, data: dict[str, Any]) -> tuple[WsRpcMessage, bool]: request = self.format_request(request_name, data) response = await self._get(request) success = response["data"].get("success", False) @@ -252,11 +252,11 @@ async def delete_key_by_fingerprint(self, fingerprint: int) -> None: if not success: self.handle_error(response) - async def get_all_private_keys(self) -> List[Tuple[PrivateKey, bytes]]: + async def get_all_private_keys(self) -> list[tuple[PrivateKey, bytes]]: """ Forwards to Keychain.get_all_private_keys() """ - keys: List[Tuple[PrivateKey, bytes]] = [] + keys: list[tuple[PrivateKey, bytes]] = [] if self.use_local_keychain(): keys = self.keychain.get_all_private_keys() else: @@ -415,11 +415,11 @@ async def get_key(self, fingerprint: int, include_secrets: bool = False) -> Opti self.handle_error(response) return key_data - async def get_keys(self, include_secrets: bool = False) -> List[KeyData]: + async def get_keys(self, include_secrets: bool = False) -> list[KeyData]: """ Returns all KeyData """ - keys: List[KeyData] = [] + keys: list[KeyData] = [] if self.use_local_keychain(): keys = self.keychain.get_keys(include_secrets) else: diff --git a/chia/daemon/keychain_server.py b/chia/daemon/keychain_server.py index 57edc5c387e4..8c3cfafd8145 100644 
--- a/chia/daemon/keychain_server.py
+++ b/chia/daemon/keychain_server.py
@@ -3,7 +3,7 @@
 import logging
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any, Dict, List, Type
+from typing import Any

 from chia_rs import PrivateKey
@@ -65,7 +65,7 @@ def run(self, keychain: Keychain) -> GetKeyResponse:
 @streamable
 @dataclass(frozen=True)
 class GetKeysResponse(Streamable):
-    keys: List[KeyData]
+    keys: list[KeyData]

 @streamable
@@ -91,7 +91,7 @@ def run(self, keychain: Keychain) -> GetPublicKeyResponse:
 class GetPublicKeyResponse(Streamable):
     key: KeyData

-    def to_json_dict(self) -> Dict[str, Any]:
+    def to_json_dict(self) -> dict[str, Any]:
         # Ensure that only approved keys are returned
         approved_keys = ["fingerprint", "public_key", "label"]
         key_dict = self.key.to_json_dict()
@@ -108,9 +108,9 @@ def run(self, keychain: Keychain) -> GetPublicKeysResponse:
 @streamable
 @dataclass(frozen=True)
 class GetPublicKeysResponse(Streamable):
-    keys: List[KeyData]
+    keys: list[KeyData]

-    def to_json_dict(self) -> Dict[str, Any]:
+    def to_json_dict(self) -> dict[str, Any]:
         # Ensure that only approved keys are returned
         approved_keys = ["fingerprint", "public_key", "label"]
         return {
@@ -149,9 +149,9 @@ class KeychainServer:
     """

     _default_keychain: Keychain = field(default_factory=Keychain)
-    _alt_keychains: Dict[str, Keychain] = field(default_factory=dict)
+    _alt_keychains: dict[str, Keychain] = field(default_factory=dict)

-    def get_keychain_for_request(self, request: Dict[str, Any]) -> Keychain:
+    def get_keychain_for_request(self, request: dict[str, Any]) -> Keychain:
         """
         Keychain instances can have user and service strings associated with them.
         The keychain backends ultimately point to the same data stores, but the user
@@ -171,7 +171,7 @@ def get_keychain_for_request(self, request: Dict[str, Any]) -> Keychain:
             self._alt_keychains[key] = keychain
         return keychain

-    async def handle_command(self, command: str, data: Dict[str, Any]) -> Dict[str, Any]:
+    async def handle_command(self, command: str, data: dict[str, Any]) -> dict[str, Any]:
         try:
             if command == "add_private_key":
                 data["private"] = True
@@ -208,7 +208,7 @@ async def handle_command(self, command: str, data: Dict[str, Any]) -> Dict[str,
             log.exception(e)
             return {"success": False, "error": str(e), "command": command}

-    async def add_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def add_key(self, request: dict[str, Any]) -> dict[str, Any]:
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -245,7 +245,7 @@ async def add_key(self, request: Dict[str, Any]) -> Dict[str, Any]:

         return {"success": True, "fingerprint": fingerprint}

-    async def check_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def check_keys(self, request: dict[str, Any]) -> dict[str, Any]:
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -261,7 +261,7 @@ async def check_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:

         return {"success": True}

-    async def delete_all_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def delete_all_keys(self, request: dict[str, Any]) -> dict[str, Any]:
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -269,7 +269,7 @@ async def delete_all_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:

         return {"success": True}

-    async def delete_key_by_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def delete_key_by_fingerprint(self, request: dict[str, Any]) -> dict[str, Any]:
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -285,7 +285,7 @@ async def delete_key_by_fingerprint(self, request: Dict[str, Any]) -> Dict[str,

         return {"success": True}

-    async def run_request(self, request_dict: Dict[str, Any], request_type: Type[Any]) -> Dict[str, Any]:
+    async def run_request(self, request_dict: dict[str, Any], request_type: type[Any]) -> dict[str, Any]:
         keychain = self.get_keychain_for_request(request_dict)
         if keychain.is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -314,8 +314,8 @@ async def run_request(self, request_dict: Dict[str, Any], request_type: Type[Any
                 "error_details": {"message": str(e)},
             }

-    async def get_all_private_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:
-        all_keys: List[Dict[str, Any]] = []
+    async def get_all_private_keys(self, request: dict[str, Any]) -> dict[str, Any]:
+        all_keys: list[dict[str, Any]] = []
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -325,8 +325,8 @@ async def get_all_private_keys(self, request: Dict[str, Any]) -> Dict[str, Any]:

         return {"success": True, "private_keys": all_keys}

-    async def get_first_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]:
-        key: Dict[str, Any] = {}
+    async def get_first_private_key(self, request: dict[str, Any]) -> dict[str, Any]:
+        key: dict[str, Any] = {}
         if self.get_keychain_for_request(request).is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
@@ -340,7 +340,7 @@ async def get_first_private_key(self, request: Dict[str, Any]) -> Dict[str, Any]

         return {"success": True, "private_key": key}

-    async def get_key_for_fingerprint(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_key_for_fingerprint(self, request: dict[str, Any]) -> dict[str, Any]:
         keychain = self.get_keychain_for_request(request)
         if keychain.is_keyring_locked():
             return {"success": False, "error": KEYCHAIN_ERR_LOCKED}
diff --git a/chia/daemon/server.py b/chia/daemon/server.py
index d1c0be1866cd..89a4dc918399 100644
--- a/chia/daemon/server.py
+++ b/chia/daemon/server.py
@@ -13,12 +13,13 @@
 import time
 import traceback
 import uuid
+from collections.abc import AsyncIterator
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import asynccontextmanager
 from enum import Enum
 from pathlib import Path
 from types import FrameType
-from typing import Any, AsyncIterator, Dict, List, Optional, Set, TextIO, Tuple
+from typing import Any, Optional, TextIO

 from chia_rs import G1Element
 from typing_extensions import Protocol
@@ -117,16 +118,16 @@ def executable_for_service(service_name: str) -> str:
     return cmd_to_exec if cmd_to_exec is not None else service_name

-async def ping() -> Dict[str, Any]:
+async def ping() -> dict[str, Any]:
     response = {"success": True, "value": "pong"}
     return response

 class Command(Protocol):
-    async def __call__(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]: ...
+    async def __call__(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]: ...
-def _get_keys_by_fingerprints(fingerprints: Optional[List[uint32]]) -> Tuple[List[KeyData], Set[uint32]]:
+def _get_keys_by_fingerprints(fingerprints: Optional[list[uint32]]) -> tuple[list[KeyData], set[uint32]]:
     all_keys = Keychain().get_keys(include_secrets=True)

     missing_fingerprints = set()
@@ -153,7 +154,7 @@ class StatusMessage:
     command: str
     destination: str
     origin: str
-    data: Dict[str, Any]
+    data: dict[str, Any]

     def create_payload(self) -> str:
         return create_payload(command=self.command, data=self.data, origin=self.origin, destination=self.destination)
@@ -171,9 +172,9 @@ def __init__(
     ):
         self.root_path = root_path
         self.log = log
-        self.services: Dict[str, List[subprocess.Popen]] = dict()
-        self.plots_queue: List[Dict] = []
-        self.connections: Dict[str, Set[WebSocketResponse]] = dict()  # service name : {WebSocketResponse}
+        self.services: dict[str, list[subprocess.Popen]] = dict()
+        self.plots_queue: list[dict] = []
+        self.connections: dict[str, set[WebSocketResponse]] = dict()  # service name : {WebSocketResponse}
         self.ping_job: Optional[asyncio.Task] = None
         self.net_config = load_config(root_path, "config.yaml")
         self.self_hostname = self.net_config["self_hostname"]
@@ -247,10 +248,10 @@ def cancel_task_safe(self, task: Optional[asyncio.Task]):
             except Exception as e:
                 self.log.error(f"Error while canceling task.{e} {task}")

-    async def stop_command(self, websocket: WebSocketResponse, request: Dict[str, Any] = {}) -> Dict[str, Any]:
+    async def stop_command(self, websocket: WebSocketResponse, request: dict[str, Any] = {}) -> dict[str, Any]:
         return await self.stop()

-    async def stop(self) -> Dict[str, Any]:
+    async def stop(self) -> dict[str, Any]:
         self.cancel_task_safe(self.ping_job)
         self.cancel_task_safe(self.state_changed_task)
         service_names = list(self.services.keys())
@@ -330,7 +331,7 @@ async def incoming_connection(self, request: web.Request) -> web.StreamResponse:

         return ws

-    async def send_all_responses(self, connections: Set[WebSocketResponse], response: str) -> None:
+    async def send_all_responses(self, connections: set[WebSocketResponse], response: str) -> None:
         for connection in connections.copy():
             try:
                 await connection.send_str(response)
@@ -348,7 +349,7 @@ async def send_all_responses(self, connections: Set[WebSocketResponse], response

                 await connection.close()

-    def remove_connection(self, websocket: WebSocketResponse) -> List[str]:
+    def remove_connection(self, websocket: WebSocketResponse) -> list[str]:
         """Returns a list of service names from which the connection was removed"""
         service_names = []
         for service_name, connections in self.connections.items():
@@ -396,7 +397,7 @@ async def ping_task(self) -> None:

     async def handle_message(
         self, websocket: WebSocketResponse, message: WsRpcMessage
-    ) -> Optional[Tuple[str, Set[WebSocketResponse]]]:
+    ) -> Optional[tuple[str, set[WebSocketResponse]]]:
         """
         This function gets called when new message is received via websocket.
         """
@@ -437,7 +438,7 @@ async def handle_message(
         full_response = format_response(message, response)
         return full_response, {websocket}

-    def get_command_mapping(self) -> Dict[str, Command]:
+    def get_command_mapping(self) -> dict[str, Command]:
         """
         Returns a mapping of commands to their respective function calls.
         """
@@ -465,11 +466,11 @@ def get_command_mapping(self) -> Dict[str, Command]:
             "get_network_info": self.get_network_info,
         }

-    async def get_network_info(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_network_info(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         network_name = self.net_config["selected_network"]
         address_prefix = self.net_config["network_overrides"]["config"][network_name]["address_prefix"]
         genesis_challenge = self.net_config["network_overrides"]["constants"][network_name]["GENESIS_CHALLENGE"]
-        response: Dict[str, Any] = {
+        response: dict[str, Any] = {
             "success": True,
             "network_name": network_name,
             "network_prefix": address_prefix,
@@ -477,22 +478,22 @@ async def get_network_info(self, websocket: WebSocketResponse, request: Dict[str
         }
         return response

-    async def is_keyring_locked(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def is_keyring_locked(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         locked: bool = Keychain.is_keyring_locked()
-        response: Dict[str, Any] = {"success": True, "is_keyring_locked": locked}
+        response: dict[str, Any] = {"success": True, "is_keyring_locked": locked}
         return response

-    async def keyring_status_command(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def keyring_status_command(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         return await self.keyring_status()

-    async def keyring_status(self) -> Dict[str, Any]:
+    async def keyring_status(self) -> dict[str, Any]:
         can_save_passphrase: bool = supports_os_passphrase_storage()
         user_passphrase_is_set: bool = Keychain.has_master_passphrase() and not using_default_passphrase()
         locked: bool = Keychain.is_keyring_locked()
         can_set_passphrase_hint: bool = True
         passphrase_hint: str = Keychain.get_master_passphrase_hint() or ""
-        requirements: Dict[str, Any] = passphrase_requirements()
-        response: Dict[str, Any] = {
+        requirements: dict[str, Any] = passphrase_requirements()
+        response: dict[str, Any] = {
             "success": True,
             "is_keyring_locked": locked,
             "can_save_passphrase": can_save_passphrase,
@@ -505,7 +506,7 @@ async def keyring_status(self) -> Dict[str, Any]:
         self.log.debug(f"Keyring status: {response}")
         return response

-    async def unlock_keyring(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def unlock_keyring(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         success: bool = False
         error: Optional[str] = None
         key: Optional[str] = request.get("key", None)
@@ -534,14 +535,14 @@ async def unlock_keyring(self, websocket: WebSocketResponse, request: Dict[str,
                 tb = traceback.format_exc()
                 self.log.error(f"check_keys failed after unlocking keyring: {e} {tb}")

-        response: Dict[str, Any] = {"success": success, "error": error}
+        response: dict[str, Any] = {"success": success, "error": error}
         return response

     async def validate_keyring_passphrase(
         self,
         websocket: WebSocketResponse,
-        request: Dict[str, Any],
-    ) -> Dict[str, Any]:
+        request: dict[str, Any],
+    ) -> dict[str, Any]:
         success: bool = False
         error: Optional[str] = None
         key: Optional[str] = request.get("key", None)
@@ -555,10 +556,10 @@ async def validate_keyring_passphrase(
             self.log.error(f"Keyring passphrase validation failed: {e} {tb}")
             error = "validation exception"

-        response: Dict[str, Any] = {"success": success, "error": error}
+        response: dict[str, Any] = {"success": success, "error": error}
         return response

-    async def set_keyring_passphrase(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def set_keyring_passphrase(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         success: bool = False
         error: Optional[str] = None
         current_passphrase: Optional[str] = None
@@ -599,10 +600,10 @@ async def set_keyring_passphrase(self, websocket: WebSocketResponse, request: Di
             # Inform the GUI of keyring status changes
             self.keyring_status_changed(await self.keyring_status(), "wallet_ui")

-        response: Dict[str, Any] = {"success": success, "error": error}
+        response: dict[str, Any] = {"success": success, "error": error}
         return response

-    async def remove_keyring_passphrase(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def remove_keyring_passphrase(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         success: bool = False
         error: Optional[str] = None
         current_passphrase: Optional[str] = None
@@ -626,28 +627,28 @@ async def remove_keyring_passphrase(self, websocket: WebSocketResponse, request:
             # Inform the GUI of keyring status changes
             self.keyring_status_changed(await self.keyring_status(), "wallet_ui")

-        response: Dict[str, Any] = {"success": success, "error": error}
+        response: dict[str, Any] = {"success": success, "error": error}
         return response

-    async def get_status(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_status(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         response = {"success": True, "genesis_initialized": True}
         return response

-    async def get_version(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_version(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         response = {"success": True, "version": __version__}
         return response

-    async def get_plotters(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
-        plotters: Dict[str, Any] = get_available_plotters(self.root_path)
-        response: Dict[str, Any] = {"success": True, "plotters": plotters}
+    async def get_plotters(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
+        plotters: dict[str, Any] = get_available_plotters(self.root_path)
+        response: dict[str, Any] = {"success": True, "plotters": plotters}
         return response

-    async def get_routes(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_routes(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         routes = list(self.get_command_mapping().keys())
-        response: Dict[str, Any] = {"success": True, "routes": routes}
+        response: dict[str, Any] = {"success": True, "routes": routes}
         return response

-    async def get_wallet_addresses(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_wallet_addresses(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         fingerprints = request.get("fingerprints", None)
         keys, missing_fingerprints = _get_keys_by_fingerprints(fingerprints)
         if len(missing_fingerprints) > 0:
@@ -684,16 +685,16 @@ async def get_wallet_addresses(self, websocket: WebSocketResponse, request: Dict

             wallet_addresses_by_fingerprint[key.fingerprint] = address_entries

-        response: Dict[str, Any] = {"success": True, "wallet_addresses": wallet_addresses_by_fingerprint}
+        response: dict[str, Any] = {"success": True, "wallet_addresses": wallet_addresses_by_fingerprint}
         return response

-    async def get_keys_for_plotting(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def get_keys_for_plotting(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         fingerprints = request.get("fingerprints", None)
         keys, missing_fingerprints = _get_keys_by_fingerprints(fingerprints)
         if len(missing_fingerprints) > 0:
             return {"success": False, "error": f"key(s) not found for fingerprint(s) {missing_fingerprints}"}

-        keys_for_plot: Dict[uint32, Any] = {}
+        keys_for_plot: dict[uint32, Any] = {}
         for key in keys:
             if key.secrets is None:
                 continue
@@ -704,13 +705,13 @@ async def get_keys_for_plotting(self, websocket: WebSocketResponse, request: Dic
                 "farmer_public_key": bytes(farmer_public_key).hex(),
                 "pool_public_key": bytes(pool_public_key).hex(),
             }
-        response: Dict[str, Any] = {
+        response: dict[str, Any] = {
             "success": True,
             "keys": keys_for_plot,
         }
         return response

-    def plot_queue_to_payload(self, plot_queue_item, send_full_log: bool) -> Dict[str, Any]:
+    def plot_queue_to_payload(self, plot_queue_item, send_full_log: bool) -> dict[str, Any]:
         error = plot_queue_item.get("error")
         has_error = error is not None
@@ -737,7 +738,7 @@ def prepare_plot_state_message(self, state: PlotEvent, id):
         }
         return message

-    def extract_plot_queue(self, id=None) -> List[Dict]:
+    def extract_plot_queue(self, id=None) -> list[dict]:
         send_full_log = id is None
         data = []
         for item in self.plots_queue:
@@ -778,14 +779,14 @@ async def _state_changed(self, message: StatusMessage) -> None:
                     websockets.remove(websocket)
                     await websocket.close()

-    def state_changed(self, service: str, message: Dict[str, Any]) -> None:
+    def state_changed(self, service: str, message: dict[str, Any]) -> None:
         self.state_changed_msg_queue.put_nowait(
             StatusMessage(
                 service=service, command="state_changed", destination="wallet_ui", origin=service, data=message
             )
         )

-    def keyring_status_changed(self, keyring_status: Dict[str, Any], destination: str) -> None:
+    def keyring_status_changed(self, keyring_status: dict[str, Any], destination: str) -> None:
         self.state_changed_msg_queue.put_nowait(
             StatusMessage(
                 service="wallet_ui",
@@ -799,7 +800,7 @@ def keyring_status_changed(self, keyring_status: Dict[str, Any], destination: st
     async def _watch_file_changes(self, config, fp: TextIO, loop: asyncio.AbstractEventLoop):
         id: str = config["id"]
         plotter: str = config["plotter"]
-        final_words: List[str] = []
+        final_words: list[str] = []

         if plotter == "chiapos":
             final_words = ["Renamed final file"]
@@ -841,7 +842,7 @@ async def _track_plotting_progress(self, config, loop: asyncio.AbstractEventLoop
         with open(file_path) as fp:
             await self._watch_file_changes(config, fp, loop)

-    def _common_plotting_command_args(self, request: Any, ignoreCount: bool) -> List[str]:
+    def _common_plotting_command_args(self, request: Any, ignoreCount: bool) -> list[str]:
         n = 1 if ignoreCount else request["n"]  # Plot count
         d = request["d"]  # Final directory
         r = request["r"]  # Threads
@@ -849,7 +850,7 @@ def _common_plotting_command_args(self, request: Any, ignoreCount: bool) -> List
         p = request.get("p")  # Pool pubkey
         c = request.get("c")  # Pool contract address

-        command_args: List[str] = ["-n", str(n), "-d", d, "-r", str(r)]
+        command_args: list[str] = ["-n", str(n), "-d", d, "-r", str(r)]

         if f is not None:
             command_args.append("-f")
@@ -863,7 +864,7 @@ def _common_plotting_command_args(self, request: Any, ignoreCount: bool) -> List
         return command_args

-    def _chiapos_plotting_command_args(self, request: Any, ignoreCount: bool) -> List[str]:
+    def _chiapos_plotting_command_args(self, request: Any, ignoreCount: bool) -> list[str]:
         k = request["k"]  # Plot size
         t = request["t"]  # Temp directory
         t2 = request.get("t2")  # Temp2 directory
@@ -874,7 +875,7 @@ def _chiapos_plotting_command_args(self, request: Any, ignoreCount: bool) -> Lis
         x = request["x"]  # Exclude final directory
         override_k = request["overrideK"]  # Force plot sizes < k32

-        command_args: List[str] = ["-k", str(k), "-t", t, "-b", str(b), "-u", str(u)]
+        command_args: list[str] = ["-k", str(k), "-t", t, "-b", str(b), "-u", str(u)]

         if t2 is not None:
             command_args.append("-2")
@@ -891,12 +892,12 @@ def _chiapos_plotting_command_args(self, request: Any, ignoreCount: bool) -> Lis

         return command_args

-    def _bladebit_plotting_command_args(self, request: Any, ignoreCount: bool) -> List[str]:
+    def _bladebit_plotting_command_args(self, request: Any, ignoreCount: bool) -> list[str]:
         plot_type = request["plot_type"]
         if plot_type not in ["ramplot", "diskplot", "cudaplot"]:
             raise ValueError(f"Unknown plot_type: {plot_type}")

-        command_args: List[str] = []
+        command_args: list[str] = []

         # Common options among diskplot, ramplot, cudaplot
         w = request.get("w", False)  # Warm start
@@ -991,7 +992,7 @@ def _bladebit_plotting_command_args(self, request: Any, ignoreCount: bool) -> Li

         return command_args

-    def _madmax_plotting_command_args(self, request: Any, ignoreCount: bool, index: int) -> List[str]:
+    def _madmax_plotting_command_args(self, request: Any, ignoreCount: bool, index: int) -> list[str]:
         k = request["k"]  # Plot size
         t = request["t"]  # Temp directory
         t2 = request["t2"]  # Temp2 directory
@@ -1000,7 +1001,7 @@ def _madmax_plotting_command_args(self, request: Any, ignoreCount: bool, index:
         K = request.get("K", 1)  # Thread multiplier for phase 2
         G = request.get("G", False)  # Alternate tmpdir/tmp2dir

-        command_args: List[str] = []
+        command_args: list[str] = []
         command_args.append(f"-k{k}")
         command_args.append(f"-u{u}")
         command_args.append(f"-v{v}")
@@ -1017,9 +1018,9 @@ def _madmax_plotting_command_args(self, request: Any, ignoreCount: bool, index:

         return command_args

-    def _build_plotting_command_args(self, request: Any, ignoreCount: bool, index: int) -> List[str]:
+    def _build_plotting_command_args(self, request: Any, ignoreCount: bool, index: int) -> list[str]:
         plotter: str = request.get("plotter", "chiapos")
-        command_args: List[str] = ["chia", "plotters", plotter]
+        command_args: list[str] = ["chia", "plotters", plotter]

         if plotter == "bladebit":
             # plotter command must be either
@@ -1064,7 +1065,7 @@ def _run_next_serial_plotting(self, loop: asyncio.AbstractEventLoop, queue: str
         if next_plot_id is not None:
             loop.create_task(self._start_plotting(next_plot_id, loop, queue))

-    def _post_process_plotting_job(self, job: Dict[str, Any]):
+    def _post_process_plotting_job(self, job: dict[str, Any]):
         id: str = job["id"]
         final_dir: str = job["final_dir"]
         exclude_final_dir: bool = job["exclude_final_dir"]
@@ -1141,7 +1142,7 @@ async def _start_plotting(self, id: str, loop: asyncio.AbstractEventLoop, queue:
                 current_process.wait()  # prevent zombies
                 self._run_next_serial_plotting(loop, queue)

-    async def start_plotting(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def start_plotting(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         service_name = request["service"]

         plotter = request.get("plotter", "chiapos")
@@ -1162,7 +1163,7 @@ async def start_plotting(self, websocket: WebSocketResponse, request: Dict[str,
             }
             return response

-        ids: List[str] = []
+        ids: list[str] = []
         for k in range(count):
             id = str(uuid.uuid4())
             ids.append(id)
@@ -1209,7 +1210,7 @@ async def start_plotting(self, websocket: WebSocketResponse, request: Dict[str,

         return response

-    async def stop_plotting(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def stop_plotting(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         id = request["id"]
         config = self._get_plots_queue_item(id)
         if config is None:
@@ -1251,7 +1252,7 @@ async def stop_plotting(self, websocket: WebSocketResponse, request: Dict[str, A
             self.state_changed(service_plotter, self.prepare_plot_state_message(PlotEvent.STATE_CHANGED, id))
             return {"success": False}

-    async def start_service(self, websocket: WebSocketResponse, request: Dict[str, Any]):
+    async def start_service(self, websocket: WebSocketResponse, request: dict[str, Any]):
         service_command = request["service"]

         error = None
@@ -1295,14 +1296,14 @@ async def start_service(self, websocket: WebSocketResponse, request: Dict[str, A
         response = {"success": success, "service": service_command, "error": error}
         return response

-    async def stop_service(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def stop_service(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         service_name = request["service"]
         result = await kill_service(self.root_path, self.services, service_name)
         response = {"success": result, "service_name": service_name}
         return response

     def is_service_running(self, service_name: str) -> bool:
-        processes: List[subprocess.Popen]
+        processes: list[subprocess.Popen]
         if service_name == service_plotter:
             processes = self.services.get(service_name, [])
             is_running = len(processes) > 0
@@ -1317,19 +1318,19 @@ def is_service_running(self, service_name: str) -> bool:
             is_running = len(service_connections) > 0
         return is_running

-    async def running_services_command(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def running_services_command(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         return await self.running_services()

-    async def running_services(self) -> Dict[str, Any]:
+    async def running_services(self) -> dict[str, Any]:
         services = list({*self.services.keys(), *self.connections.keys()})
         running_services = [service_name for service_name in services if self.is_service_running(service_name)]

         return {"success": True, "running_services": running_services}

-    async def is_running_command(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def is_running_command(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         return await self.is_running(request=request)

-    async def is_running(self, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def is_running(self, request: dict[str, Any]) -> dict[str, Any]:
         service_name = request["service"]
         is_running = self.is_service_running(service_name)
         return {"success": True, "service_name": service_name, "is_running": is_running}
@@ -1340,7 +1341,7 @@ async def exit(self) -> None:
             await self.webserver.await_closed()
         log.info("chia daemon exiting")

-    async def register_service(self, websocket: WebSocketResponse, request: Dict[str, Any]) -> Dict[str, Any]:
+    async def register_service(self, websocket: WebSocketResponse, request: dict[str, Any]) -> dict[str, Any]:
         self.log.info(f"Register service {request}")
         service = request.get("service")
         if service is None:
@@ -1350,7 +1351,7 @@ async def register_service(self, websocket: WebSocketResponse, request: Dict[str
             self.connections[service] = set()
         self.connections[service].add(websocket)

-        response: Dict[str, Any] = {"success": True}
+        response: dict[str, Any] = {"success": True}
         if service == service_plotter:
             response = {
                 "success": True,
@@ -1394,8 +1395,8 @@ def plotter_log_path(root_path: Path, id: str):

 def launch_plotter(
-    root_path: Path, service_name: str, service_array: List[str], id: str
-) -> Tuple[subprocess.Popen, Path]:
+    root_path: Path, service_name: str, service_array: list[str], id: str
+) -> tuple[subprocess.Popen, Path]:
     # we need to pass on the possibly altered CHIA_ROOT
     os.environ["CHIA_ROOT"] = str(root_path)
     service_executable = executable_for_service(service_array[0])
@@ -1438,7 +1439,7 @@ def launch_plotter(
     return process, pid_path

-def launch_service(root_path: Path, service_command) -> Tuple[subprocess.Popen, Path]:
+def launch_service(root_path: Path, service_command) -> tuple[subprocess.Popen, Path]:
     """
     Launch a child process.
     """
@@ -1482,7 +1483,7 @@ def launch_service(root_path: Path, service_command) -> Tuple[subprocess.Popen,

 async def kill_processes(
-    processes: List[subprocess.Popen],
+    processes: list[subprocess.Popen],
     root_path: Path,
     service_name: str,
     id: str,
@@ -1526,7 +1527,7 @@ async def kill_processes(

 async def kill_service(
-    root_path: Path, services: Dict[str, List[subprocess.Popen]], service_name: str, delay_before_kill: int = 15
+    root_path: Path, services: dict[str, list[subprocess.Popen]], service_name: str, delay_before_kill: int = 15
 ) -> bool:
     processes = services.get(service_name)
     if processes is None:
@@ -1536,7 +1537,7 @@ async def kill_service(
     return result

-def is_running(services: Dict[str, subprocess.Popen], service_name: str) -> bool:
+def is_running(services: dict[str, subprocess.Popen], service_name: str) -> bool:
     process = services.get(service_name)
     return process is not None and process.poll() is None
diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py
index 0c0360a0c5ea..f55e879fcb27 100644
--- a/chia/data_layer/data_layer.py
+++ b/chia/data_layer/data_layer.py
@@ -10,22 +10,9 @@
 import random
 import time
 import traceback
+from collections.abc import AsyncIterator, Awaitable
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncIterator,
-    Awaitable,
-    ClassVar,
-    Dict,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Union,
-    cast,
-    final,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast, final

 import aiohttp
@@ -82,7 +69,7 @@
 from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG

-async def get_plugin_info(plugin_remote: PluginRemote) -> Tuple[PluginRemote, Dict[str, Any]]:
+async def get_plugin_info(plugin_remote: PluginRemote) -> tuple[PluginRemote, dict[str, Any]]:
     try:
         async with aiohttp.ClientSession() as session:
             async with session.post(
@@ -107,15 +94,15 @@ class DataLayer:
     _protocol_check: ClassVar[RpcServiceProtocol] = cast("DataLayer", None)

     db_path: Path
-    config: Dict[str, Any]
+    config: dict[str, Any]
     root_path: Path
     log: logging.Logger
     wallet_rpc_init: Awaitable[WalletRpcClient]
-    downloaders: List[PluginRemote]
-    uploaders: List[PluginRemote]
+    downloaders: list[PluginRemote]
+    uploaders: list[PluginRemote]
     maximum_full_file_count: int
     server_files_location: Path
-    unsubscribe_data_queue: List[UnsubscribeData]
+    unsubscribe_data_queue: list[UnsubscribeData]
     _server: Optional[ChiaServer] = None
     none_bytes: bytes32 = bytes32([0] * 32)
     initialized: bool = False
@@ -161,11 +148,11 @@ def wallet_rpc(self) -> WalletRpcClient:
     @classmethod
     def create(
         cls,
-        config: Dict[str, Any],
+        config: dict[str, Any],
         root_path: Path,
         wallet_rpc_init: Awaitable[WalletRpcClient],
-        downloaders: List[PluginRemote],
-        uploaders: List[PluginRemote],  # dont add FilesystemUploader to this, it is the default uploader
+        downloaders: list[PluginRemote],
+        uploaders: list[PluginRemote],  # dont add FilesystemUploader to this, it is the default uploader
         name: Optional[str] = None,
     ) -> DataLayer:
         if name == "":
@@ -236,7 +223,7 @@ def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None:
     async def on_connect(self, connection: WSChiaConnection) -> None:
         pass

-    def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
+    def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]:
         return default_get_connections(server=self.server, request_node_type=request_node_type)

     def set_server(self, server: ChiaServer) -> None:
@@ -252,7 +239,7 @@ async def wallet_log_in(self, fingerprint: int) -> int:

     async def create_store(
         self, fee: uint64, root: bytes32 = bytes32([0] * 32)
-    ) -> Tuple[List[TransactionRecord], bytes32]:
+    ) -> tuple[list[TransactionRecord], bytes32]:
         txs, store_id = await self.wallet_rpc.create_new_dl(root, fee)
         res = await self.data_store.create_tree(store_id=store_id)
         if res is None:
@@ -263,7 +250,7 @@ async def create_store(
     async def batch_update(
         self,
         store_id: bytes32,
-        changelist: List[Dict[str, Any]],
+        changelist: list[dict[str, Any]],
         fee: uint64,
         submit_on_chain: bool = True,
     ) -> Optional[TransactionRecord]:
@@ -287,11 +274,11 @@ async def _get_publishable_root_hash(self, store_id: bytes32) -> bytes32:

     async def multistore_batch_update(
         self,
-        store_updates: List[Dict[str, Any]],
+        store_updates: list[dict[str, Any]],
         fee: uint64,
         submit_on_chain: bool = True,
-    ) -> List[TransactionRecord]:
-        store_ids: Set[bytes32] = set()
+    ) -> list[TransactionRecord]:
+        store_ids: set[bytes32] = set()
         for update in store_updates:
             store_id = update["store_id"]
             changelist = update["changelist"]
@@ -306,7 +293,7 @@ async def multistore_batch_update(
         await self.data_store.clean_node_table()

         if submit_on_chain:
-            update_dictionary: Dict[bytes32, bytes32] = {}
+            update_dictionary: dict[bytes32, bytes32] = {}
             for store_id in store_ids:
                 await self._update_confirmation_status(store_id=store_id)
                 root_hash = await self._get_publishable_root_hash(store_id=store_id)
@@ -332,9 +319,9 @@ async def submit_pending_root(
         await self.data_store.change_root_status(pending_root, Status.PENDING)
         return await self.publish_update(store_id, fee)

-    async def submit_all_pending_roots(self, fee: uint64) -> List[TransactionRecord]:
+    async def submit_all_pending_roots(self, fee: uint64) -> list[TransactionRecord]:
         pending_roots = await self.data_store.get_all_pending_batches_roots()
-        update_dictionary: Dict[bytes32, bytes32] = {}
+        update_dictionary: dict[bytes32, bytes32] = {}
         if len(pending_roots) == 0:
             raise Exception("No pending roots found to submit")
         for pending_root in pending_roots:
@@ -347,7 +334,7 @@ async def submit_all_pending_roots(self, fee: uint64) -> List[TransactionRecord]
     async def batch_insert(
         self,
         store_id: bytes32,
-        changelist: List[Dict[str, Any]],
+        changelist: list[dict[str, Any]],
         status: Status = Status.PENDING,
         enable_batch_autoinsert: Optional[bool] = None,
     ) -> bytes32:
@@ -359,7 +346,7 @@ async def batch_insert(
             raise Exception("Already have a pending root waiting for confirmation.")

         # check before any DL changes that this singleton is currently owned by this wallet
-        singleton_records: List[SingletonRecord] = await self.get_owned_stores()
+        singleton_records: list[SingletonRecord] = await self.get_owned_stores()
         if not any(store_id == singleton.launcher_id for singleton in singleton_records):
             raise ValueError(f"Singleton with launcher ID {store_id} is not owned by DL Wallet")
@@ -417,7 +404,7 @@ async def get_keys_values(
         self,
         store_id: bytes32,
         root_hash: Union[bytes32, Unspecified],
-    ) -> List[TerminalNode]:
+    ) -> list[TerminalNode]:
         await self._update_confirmation_status(store_id=store_id)

         res = await self.data_store.get_keys_values(store_id, root_hash)
@@ -439,7 +426,7 @@ async def get_keys_values_paginated(
         res = await self.data_store.get_keys_values_paginated(store_id, page, max_page_size, root_hash)
         return res

-    async def get_keys(self, store_id: bytes32, root_hash: Union[bytes32, Unspecified]) -> List[bytes]:
+    async def get_keys(self, store_id: bytes32, root_hash: Union[bytes32, Unspecified]) -> list[bytes]:
         await self._update_confirmation_status(store_id=store_id)

         res = await self.data_store.get_keys(store_id, root_hash)
@@ -459,7 +446,7 @@ async def get_keys_paginated(
         res = await self.data_store.get_keys_paginated(store_id, page, max_page_size, root_hash)
         return res

-    async def get_ancestors(self, node_hash: bytes32, store_id: bytes32) -> List[InternalNode]:
+    async def get_ancestors(self, node_hash: bytes32, store_id: bytes32) -> list[InternalNode]:
         await self._update_confirmation_status(store_id=store_id)

         res = await self.data_store.get_ancestors(node_hash=node_hash, store_id=store_id)
@@ -482,7 +469,7 @@ async def get_local_root(self, store_id: bytes32) -> Optional[bytes32]:
             return None
         return res.node_hash

-    async def get_root_history(self, store_id: bytes32) -> List[SingletonRecord]:
+    async def get_root_history(self, store_id: bytes32) -> list[SingletonRecord]:
         records = await self.wallet_rpc.dl_history(store_id)
         if records is None:
             self.log.error(f"Failed to get root history for {store_id.hex()}")
@@ -793,7 +780,7 @@ async def add_missing_files(self, store_id: bytes32, overwrite: bool, foldername
                 else:
                     self.log.debug(f"uploaded to uploader {uploader}")

-    async def subscribe(self, store_id: bytes32, urls: List[str]) -> Subscription:
+    async def subscribe(self, store_id: bytes32, urls: list[str]) -> Subscription:
         parsed_urls = [url.rstrip("/") for url in urls]
         subscription = Subscription(store_id, [ServerInfo(url, 0, 0) for url in parsed_urls])
         await self.wallet_rpc.dl_track_new(subscription.store_id)
@@ -802,7 +789,7 @@ async def subscribe(self, store_id: bytes32, urls: List[str]) -> Subscription:
         self.log.info(f"Done adding subscription: {subscription.store_id}")
         return subscription

-    async def remove_subscriptions(self, store_id: bytes32, urls: List[str]) -> None:
+    async def remove_subscriptions(self, store_id: bytes32, urls: list[str]) -> None:
         parsed_urls = [url.rstrip("/") for url in urls]
         async with self.subscription_lock:
             await self.data_store.remove_subscriptions(store_id, parsed_urls)
@@ -821,7 +808,7 @@ async def process_unsubscribe(self, store_id: bytes32, retain_data: bool) -> Non
         subscriptions = await self.data_store.get_subscriptions()
         if store_id not in (subscription.store_id for subscription in subscriptions):
             raise RuntimeError("No subscription found for the given store_id.")
-        paths: List[Path] = []
+        paths: list[Path] = []
         if await self.data_store.store_id_exists(store_id) and not retain_data:
             generation = await self.data_store.get_tree_generation(store_id)
             all_roots = await self.data_store.get_roots_between(store_id, 1, generation + 1)
@@ -860,11 +847,11 @@ async def process_unsubscribe(self, store_id: bytes32, retain_data: bool) -> Non
             except FileNotFoundError:
                 pass

-    async def get_subscriptions(self) -> List[Subscription]:
+    async def get_subscriptions(self) -> list[Subscription]:
         async with self.subscription_lock:
             return await self.data_store.get_subscriptions()

-    async def add_mirror(self, store_id: bytes32, urls: List[str], amount: uint64, fee: uint64) -> None:
+    async def add_mirror(self, store_id: bytes32, urls: list[str], amount: uint64, fee: uint64) -> None:
         if not urls:
             raise RuntimeError("URL list can't be empty")
         bytes_urls = [bytes(url, "utf8") for url in urls]
@@ -873,22 +860,22 @@ async def add_mirror(self, store_id: bytes32, urls: List[str], amount: uint64, f
     async def delete_mirror(self, coin_id: bytes32, fee: uint64) -> None:
         await self.wallet_rpc.dl_delete_mirror(coin_id, fee)

-    async def get_mirrors(self, store_id: bytes32) -> List[Mirror]:
-        mirrors: List[Mirror] = await self.wallet_rpc.dl_get_mirrors(store_id)
+    async def get_mirrors(self, store_id: bytes32) -> list[Mirror]:
+        mirrors: list[Mirror] = await self.wallet_rpc.dl_get_mirrors(store_id)
         return [mirror for mirror in mirrors if mirror.urls]

     async def update_subscriptions_from_wallet(self, store_id: bytes32) -> None:
-        mirrors: List[Mirror] = await self.wallet_rpc.dl_get_mirrors(store_id)
-        urls: List[str] = []
+        mirrors: list[Mirror] = await self.wallet_rpc.dl_get_mirrors(store_id)
+        urls: list[str] = []
         for mirror in mirrors:
             urls = urls + [url.decode("utf8") for url in mirror.urls]
         urls = [url.rstrip("/") for url in urls]
         await self.data_store.update_subscriptions_from_wallet(store_id, urls)

-    async def get_owned_stores(self) -> List[SingletonRecord]:
+    async def get_owned_stores(self) -> list[SingletonRecord]:
         return await self.wallet_rpc.dl_owned_singletons()

-    async def get_kv_diff(self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32) -> Set[DiffData]:
+    async def get_kv_diff(self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32) -> set[DiffData]:
         return await self.data_store.get_kv_diff(store_id, hash_1, hash_2)

     async def get_kv_diff_paginated(
@@ -1006,10 +993,10 @@ async def update_subscription(
     async def build_offer_changelist(
         self,
         store_id: bytes32,
-        inclusions: Tuple[KeyValue, ...],
-    ) -> List[Dict[str, Any]]:
+        inclusions: tuple[KeyValue, ...],
+    ) -> list[dict[str, Any]]:
         async with self.data_store.transaction():
-            changelist: List[Dict[str, Any]] = []
+            changelist: list[dict[str, Any]] = []
             for entry in inclusions:
                 try:
                     existing_value = await self.get_value(store_id=store_id, key=entry.key)
@@ -1039,12 +1026,12 @@ async def build_offer_changelist(

             return changelist

-    async def process_offered_stores(self, offer_stores: Tuple[OfferStore, ...]) -> Dict[bytes32, StoreProofs]:
+    async def process_offered_stores(self, offer_stores: tuple[OfferStore, ...]) -> dict[bytes32, StoreProofs]:
         for offer_store in offer_stores:
             await self._update_confirmation_status(store_id=offer_store.store_id)

         async with self.data_store.transaction():
-            our_store_proofs: Dict[bytes32, StoreProofs] = {}
+            our_store_proofs: dict[bytes32, StoreProofs] = {}
             for offer_store in offer_stores:
                 changelist = await self.build_offer_changelist(
                     store_id=offer_store.store_id,
@@ -1066,7 +1053,7 @@ async def process_offered_stores(self, offer_stores: Tuple[OfferStore, ...]) ->
                     if new_root_hash is None:
                         raise Exception("only inserts are supported so a None root hash should not be possible")

-                proofs: List[Proof] = []
+                proofs: list[Proof] = []
                 for entry in offer_store.inclusions:
                     node_hash = await self.get_key_value_hash(
                         store_id=offer_store.store_id,
@@ -1098,19 +1085,19 @@ async def process_offered_stores(self, offer_stores: Tuple[OfferStore, ...]) ->

     async def make_offer(
         self,
-        maker: Tuple[OfferStore, ...],
-        taker: Tuple[OfferStore, ...],
+        maker: tuple[OfferStore, ...],
+        taker: tuple[OfferStore, ...],
         fee: uint64,
     ) -> Offer:
         async with self.data_store.transaction():
             our_store_proofs = await self.process_offered_stores(offer_stores=maker)

-            offer_dict: Dict[Union[uint32, str], int] = {
+            offer_dict: dict[Union[uint32, str], int] = {
                 **{offer_store.store_id.hex(): -1 for offer_store in maker},
                 **{offer_store.store_id.hex(): 1 for offer_store in taker},
             }

-            solver: Dict[str, Any] = {
+            solver: dict[str, Any] = {
                 "0x"
                 + our_offer_store.store_id.hex(): {
                     "new_root": "0x" + our_store_proofs[our_offer_store.store_id].proofs[0].root().hex(),
@@ -1158,8 +1145,8 @@ async def make_offer(
     async def take_offer(
         self,
         offer_bytes: bytes,
-        taker: Tuple[OfferStore, ...],
-        maker: Tuple[StoreProofs, ...],
+        taker: tuple[OfferStore, ...],
+        maker: tuple[StoreProofs, ...],
         fee: uint64,
     ) -> TradeRecord:
         async with self.data_store.transaction():
@@ -1170,10 +1157,10 @@ async def take_offer(

             verify_offer(maker=maker, taker=taker, summary=summary)

-            all_store_proofs: Dict[bytes32, StoreProofs] = {
+            all_store_proofs: dict[bytes32, StoreProofs] = {
                 store_proofs.proofs[0].root(): store_proofs for store_proofs in [*maker, *our_store_proofs.values()]
             }
-            proofs_of_inclusion: List[Tuple[str, str, List[str]]] = []
+            proofs_of_inclusion: list[tuple[str, str, list[str]]] = []
             for root, store_proofs in all_store_proofs.items():
                 for proof in store_proofs.proofs:
                     layers = [
@@ -1194,7 +1181,7 @@ async def take_offer(
                         )
                     )

-            solver: Dict[str, Any] = {
+            solver: dict[str, Any] = {
                 "proofs_of_inclusion": proofs_of_inclusion,
                 **{
                     "0x"
@@ -1232,7 +1219,7 @@ async def take_offer(
         return trade_record

     async def cancel_offer(self, trade_id: bytes32, secure: bool, fee: uint64) -> None:
-        store_ids: List[bytes32] = []
+        store_ids: list[bytes32] = []

         if not secure:
             trade_record = await self.wallet_rpc.get_offer(trade_id=trade_id, file_contents=True)
@@ -1270,7 +1257,7 @@ async def get_sync_status(self, store_id: bytes32) -> SyncStatus:
             target_generation=singleton_record.generation,
         )

-    async def get_uploaders(self, store_id: bytes32) -> List[PluginRemote]:
+    async def get_uploaders(self, store_id: bytes32) -> list[PluginRemote]:
         uploaders = []
         for uploader in self.uploaders:
             async with aiohttp.ClientSession() as session:
diff --git a/chia/data_layer/data_layer_errors.py b/chia/data_layer/data_layer_errors.py
index 7bb2eb739a67..9f72121e2069 100644
--- a/chia/data_layer/data_layer_errors.py
+++ b/chia/data_layer/data_layer_errors.py
@@ -1,6 +1,6 @@
 from __future__ import annotations

-from typing import Iterable, List
+from collections.abc import Iterable

 from chia.types.blockchain_format.sized_bytes import bytes32
@@ -14,7 +14,7 @@ def build_message_with_hashes(message: str, bytes_objects: Iterable[bytes]) -> s

 class TreeGenerationIncrementingError(IntegrityError):
-    def __init__(self, store_ids: List[bytes32]) -> None:
+    def __init__(self, store_ids: list[bytes32]) -> None:
         super().__init__(
             build_message_with_hashes(
                 message="Found trees with generations not properly incrementing:",
@@ -24,7 +24,7 @@ def __init__(self, store_ids: List[bytes32]) -> None:

 class NodeHashError(IntegrityError):
-    def __init__(self, node_hashes: List[bytes32]) -> None:
+    def __init__(self, node_hashes: list[bytes32]) -> None:
         super().__init__(
             build_message_with_hashes(
                 message="Found nodes with incorrect hashes:",
diff --git a/chia/data_layer/data_layer_server.py b/chia/data_layer/data_layer_server.py
index d06a33455ba2..52e007551faf 100644
--- a/chia/data_layer/data_layer_server.py
+++ b/chia/data_layer/data_layer_server.py
@@ -7,7 +7,7 @@
 from dataclasses import dataclass, field
 from pathlib import Path
 from types import FrameType
-from typing import Any, Dict, Optional
+from typing import Any, Optional

 import click
 from aiohttp import web
@@ -35,7 +35,7 @@
 @dataclass
 class DataLayerServer:
     root_path: Path
-    config: Dict[str, Any]
+    config: dict[str, Any]
     log: logging.Logger
     shutdown_event: asyncio.Event
     webserver: Optional[WebServer] = None
diff --git a/chia/data_layer/data_layer_util.py b/chia/data_layer/data_layer_util.py
index f672b4d70a14..d7561ea21a26 100644
--- a/chia/data_layer/data_layer_util.py
+++ b/chia/data_layer/data_layer_util.py
@@ -4,7 +4,7 @@
 from dataclasses import dataclass, field
 from enum import Enum, IntEnum
 from hashlib import sha256
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type, Union
+from typing import TYPE_CHECKING, Any, Optional, Union

 import aiosqlite
 from typing_extensions import final
@@ -52,14 +52,14 @@ def key_hash(key: bytes) -> bytes32:
 class PaginationData:
     total_pages: int
     total_bytes: int
-    hashes: List[bytes32]
+    hashes: list[bytes32]

-def get_hashes_for_page(page: int, lengths: Dict[bytes32, int], max_page_size: int) -> PaginationData:
+def get_hashes_for_page(page: int, lengths: dict[bytes32, int], max_page_size: int) -> PaginationData:
     current_page = 0
     current_page_size = 0
     total_bytes = 0
-    hashes: List[bytes32] = []
+    hashes: list[bytes32] = []
     for hash, length in sorted(lengths.items(), key=lambda x: (-x[1], x[0])):
         if length > max_page_size:
             raise RuntimeError(
@@ -98,9 +98,9 @@ async def _dot_dump(

     n = 8

-    dot_nodes: List[str] = []
-    dot_connections: List[str] = []
-    dot_pair_boxes: List[str] = []
+    dot_nodes: list[str] = []
+    dot_connections: list[str] = []
+    dot_pair_boxes: list[str] = []

     for terminal_node in terminal_nodes:
         hash = terminal_node.hash.hex()
@@ -245,7 +245,7 @@ def from_hashes(cls, primary_hash: bytes32, other_hash_side: Side, other_hash: b
 class ProofOfInclusion:
     node_hash: bytes32
     # children before parents
-    layers: List[ProofOfInclusionLayer]
+    layers: list[ProofOfInclusionLayer]

     @property
     def root_hash(self) -> bytes32:
@@ -257,7 +257,7 @@ def root_hash(self) -> bytes32:
     def sibling_sides_integer(self) -> int:
         return sum(other_side_to_bit[layer.other_hash_side] << index for index, layer in enumerate(self.layers))

-    def sibling_hashes(self) -> List[bytes32]:
+    def sibling_hashes(self) -> list[bytes32]:
         return [layer.other_hash for layer in self.layers]

     def as_program(self) -> Program:
@@ -366,7 +366,7 @@ def from_row(cls, row: aiosqlite.Row) -> Root:
             status=Status(row["status"]),
         )

-    def to_row(self) -> Dict[str, Any]:
+    def to_row(self) -> dict[str, Any]:
         return {
             "tree_id": self.store_id,
             "node_hash": self.node_hash,
@@ -375,7 +375,7 @@ def to_row(self) -> Dict[str, Any]:
         }

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> Root:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> Root:
         return cls(
             store_id=bytes32.from_hexstr(marshalled["tree_id"]),
             node_hash=None if marshalled["node_hash"] is None else bytes32.from_hexstr(marshalled["node_hash"]),
@@ -383,7 +383,7 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> Root:
             status=Status(marshalled["status"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "tree_id": self.store_id.hex(),
             "node_hash": None if self.node_hash is None else self.node_hash.hex(),
@@ -392,7 +392,7 @@ def marshal(self) -> Dict[str, Any]:
         }

-node_type_to_class: Dict[NodeType, Union[Type[InternalNode], Type[TerminalNode]]] = {
+node_type_to_class: dict[NodeType, Union[type[InternalNode], type[TerminalNode]]] = {
     NodeType.INTERNAL: InternalNode,
     NodeType.TERMINAL: TerminalNode,
 }
@@ -408,7 +408,7 @@ class ServerInfo:
 @dataclass(frozen=True)
 class Subscription:
     store_id: bytes32
-    servers_info: List[ServerInfo]
+    servers_info: list[ServerInfo]

 @dataclass(frozen=True)
@@ -433,13 +433,13 @@ class KeyValue:
     value: bytes

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> KeyValue:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> KeyValue:
         return cls(
             key=hexstr_to_bytes(marshalled["key"]),
             value=hexstr_to_bytes(marshalled["value"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "key": self.key.hex(),
             "value": self.value.hex(),
@@ -449,16 +449,16 @@ def marshal(self) -> Dict[str, Any]:
 @dataclasses.dataclass(frozen=True)
 class OfferStore:
     store_id: bytes32
-    inclusions: Tuple[KeyValue, ...]
+    inclusions: tuple[KeyValue, ...]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> OfferStore:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> OfferStore:
         return cls(
             store_id=bytes32.from_hexstr(marshalled["store_id"]),
             inclusions=tuple(KeyValue.unmarshal(key_value) for key_value in marshalled["inclusions"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "store_id": self.store_id.hex(),
             "inclusions": [key_value.marshal() for key_value in self.inclusions],
@@ -475,14 +475,14 @@ class Layer:
     combined_hash: bytes32

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> Layer:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> Layer:
         return cls(
             other_hash_side=Side.unmarshal(marshalled["other_hash_side"]),
             other_hash=bytes32.from_hexstr(marshalled["other_hash"]),
             combined_hash=bytes32.from_hexstr(marshalled["combined_hash"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "other_hash_side": self.other_hash_side.marshal(),
             "other_hash": self.other_hash.hex(),
@@ -492,19 +492,19 @@ def marshal(self) -> Dict[str, Any]:

 @dataclasses.dataclass(frozen=True)
 class MakeOfferRequest:
-    maker: Tuple[OfferStore, ...]
-    taker: Tuple[OfferStore, ...]
+    maker: tuple[OfferStore, ...]
+    taker: tuple[OfferStore, ...]
     fee: Optional[uint64]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> MakeOfferRequest:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> MakeOfferRequest:
         return cls(
             maker=tuple(OfferStore.unmarshal(offer_store) for offer_store in marshalled["maker"]),
             taker=tuple(OfferStore.unmarshal(offer_store) for offer_store in marshalled["taker"]),
             fee=None if marshalled["fee"] is None else uint64(marshalled["fee"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "maker": [offer_store.marshal() for offer_store in self.maker],
             "taker": [offer_store.marshal() for offer_store in self.taker],
@@ -517,10 +517,10 @@ class Proof:
     key: bytes
     value: bytes
     node_hash: bytes32
-    layers: Tuple[Layer, ...]
+    layers: tuple[Layer, ...]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> Proof:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> Proof:
         return cls(
             key=hexstr_to_bytes(marshalled["key"]),
             value=hexstr_to_bytes(marshalled["value"]),
@@ -534,7 +534,7 @@ def root(self) -> bytes32:

         return self.layers[-1].combined_hash

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "key": self.key.hex(),
             "value": self.value.hex(),
@@ -546,16 +546,16 @@ def marshal(self) -> Dict[str, Any]:
 @dataclasses.dataclass(frozen=True)
 class StoreProofs:
     store_id: bytes32
-    proofs: Tuple[Proof, ...]
+    proofs: tuple[Proof, ...]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> StoreProofs:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> StoreProofs:
         return cls(
             store_id=bytes32.from_hexstr(marshalled["store_id"]),
             proofs=tuple(Proof.unmarshal(proof) for proof in marshalled["proofs"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "store_id": self.store_id.hex(),
             "proofs": [proof.marshal() for proof in self.proofs],
@@ -566,11 +566,11 @@ def marshal(self) -> Dict[str, Any]:
 class Offer:
     trade_id: bytes
     offer: bytes
-    taker: Tuple[OfferStore, ...]
-    maker: Tuple[StoreProofs, ...]
+    taker: tuple[OfferStore, ...]
+    maker: tuple[StoreProofs, ...]
     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> Offer:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> Offer:
         return cls(
             trade_id=bytes32.from_hexstr(marshalled["trade_id"]),
             offer=hexstr_to_bytes(marshalled["offer"]),
@@ -578,7 +578,7 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> Offer:
             maker=tuple(StoreProofs.unmarshal(store_proof) for store_proof in marshalled["maker"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "trade_id": self.trade_id.hex(),
             "offer": self.offer.hex(),
@@ -593,13 +593,13 @@ class MakeOfferResponse:
     offer: Offer

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> MakeOfferResponse:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> MakeOfferResponse:
         return cls(
             success=marshalled["success"],
             offer=Offer.unmarshal(marshalled["offer"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "success": self.success,
             "offer": self.offer.marshal(),
@@ -612,13 +612,13 @@ class TakeOfferRequest:
     fee: Optional[uint64]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> TakeOfferRequest:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> TakeOfferRequest:
         return cls(
             offer=Offer.unmarshal(marshalled["offer"]),
             fee=None if marshalled["fee"] is None else uint64(marshalled["fee"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "offer": self.offer.marshal(),
             "fee": None if self.fee is None else int(self.fee),
@@ -631,13 +631,13 @@ class TakeOfferResponse:
     trade_id: bytes32

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> TakeOfferResponse:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> TakeOfferResponse:
         return cls(
             success=marshalled["success"],
             trade_id=bytes32.from_hexstr(marshalled["trade_id"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "success": self.success,
             "trade_id": self.trade_id.hex(),
@@ -653,7 +653,7 @@ class VerifyOfferResponse:
     fee: Optional[uint64] = None

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> VerifyOfferResponse:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> VerifyOfferResponse:
         return cls(
             success=marshalled["success"],
             valid=marshalled["valid"],
@@ -661,7 +661,7 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> VerifyOfferResponse:
             fee=None if marshalled["fee"] is None else uint64(marshalled["fee"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "success": self.success,
             "valid": self.valid,
@@ -678,14 +678,14 @@ class CancelOfferRequest:
     fee: Optional[uint64]

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> CancelOfferRequest:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> CancelOfferRequest:
         return cls(
             trade_id=bytes32.from_hexstr(marshalled["trade_id"]),
             secure=marshalled["secure"],
             fee=None if marshalled["fee"] is None else uint64(marshalled["fee"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "trade_id": self.trade_id.hex(),
             "secure": self.secure,
@@ -698,12 +698,12 @@ class CancelOfferResponse:
     success: bool

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> CancelOfferResponse:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> CancelOfferResponse:
         return cls(
             success=marshalled["success"],
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "success": self.success,
         }
@@ -715,12 +715,12 @@ class ClearPendingRootsRequest:
     store_id: bytes32

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> ClearPendingRootsRequest:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> ClearPendingRootsRequest:
         return cls(
             store_id=bytes32.from_hexstr(marshalled["store_id"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "store_id": self.store_id.hex(),
         }
@@ -738,13 +738,13 @@ class ClearPendingRootsResponse:
     # status: Status

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> ClearPendingRootsResponse:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> ClearPendingRootsResponse:
         return cls(
             success=marshalled["success"],
             root=None if marshalled["root"] is None else Root.unmarshal(marshalled["root"]),
         )

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "success": self.success,
             "root": None if self.root is None else self.root.marshal(),
@@ -764,10 +764,10 @@ class SyncStatus:
 class PluginRemote:
     url: str
     # repr=False to avoid leaking secrets
-    headers: Dict[str, str] = dataclasses.field(default_factory=dict, hash=False, repr=False)
+    headers: dict[str, str] = dataclasses.field(default_factory=dict, hash=False, repr=False)

     @classmethod
-    def unmarshal(cls, marshalled: Dict[str, Any]) -> PluginRemote:
+    def unmarshal(cls, marshalled: dict[str, Any]) -> PluginRemote:
         return cls(
             url=marshalled["url"],
             headers=marshalled.get("headers", {}),
@@ -776,10 +776,10 @@ def unmarshal(cls, marshalled: Dict[str, Any]) -> PluginRemote:

 @dataclasses.dataclass(frozen=True)
 class PluginStatus:
-    uploaders: Dict[str, Dict[str, Any]]
-    downloaders: Dict[str, Dict[str, Any]]
+    uploaders: dict[str, dict[str, Any]]
+    downloaders: dict[str, dict[str, Any]]

-    def marshal(self) -> Dict[str, Any]:
+    def marshal(self) -> dict[str, Any]:
         return {
             "plugin_status": {
                 "uploaders": self.uploaders,
@@ -802,9 +802,9 @@ class UnsubscribeData:

 @dataclasses.dataclass(frozen=True)
 class KeysValuesCompressed:
-    keys_values_hashed: Dict[bytes32, bytes32]
-    key_hash_to_length: Dict[bytes32, int]
-    leaf_hash_to_length: Dict[bytes32, int]
+    keys_values_hashed: dict[bytes32, bytes32]
+    key_hash_to_length: dict[bytes32, int]
+    leaf_hash_to_length: dict[bytes32, int]
     root_hash: Optional[bytes32]

@@ -812,7 +812,7 @@ class KeysPaginationData:
 class KeysPaginationData:
     total_pages: int
     total_bytes: int
-    keys: List[bytes]
+    keys: list[bytes]
     root_hash: Optional[bytes32]

@@ -820,7 +820,7 @@ class KeysValuesPaginationData:
 class KeysValuesPaginationData:
     total_pages: int
     total_bytes: int
-    keys_values: List[TerminalNode]
+    keys_values: list[TerminalNode]
     root_hash: Optional[bytes32]

@@ -828,7 +828,7 @@ class KVDiffPaginationData:
 class KVDiffPaginationData:
     total_pages: int
     total_bytes: int
-    kv_diff: List[DiffData]
+    kv_diff: list[DiffData]

 #
@@ -849,7 +849,7 @@ class HashOnlyProof(Streamable):
     key_clvm_hash: bytes32
     value_clvm_hash: bytes32
     node_hash: bytes32
-    layers: List[ProofLayer]
+    layers: list[ProofLayer]

     def root(self) -> bytes32:
         if len(self.layers) == 0:
@@ -857,7 +857,7 @@ def root(self) -> bytes32:
         return self.layers[-1].combined_hash

     @classmethod
-    def from_key_value(cls, key: bytes, value: bytes, node_hash: bytes32, layers: List[ProofLayer]) -> HashOnlyProof:
+    def from_key_value(cls, key: bytes, value: bytes, node_hash: bytes32, layers: list[ProofLayer]) -> HashOnlyProof:
         return cls(
             key_clvm_hash=Program.to(key).get_tree_hash(),
             value_clvm_hash=Program.to(value).get_tree_hash(),
KeyValueHashes(Streamable): @dataclasses.dataclass(frozen=True) class ProofResultInclusions(Streamable): store_id: bytes32 - inclusions: List[KeyValueHashes] + inclusions: list[KeyValueHashes] @streamable @dataclasses.dataclass(frozen=True) class GetProofRequest(Streamable): store_id: bytes32 - keys: List[bytes] + keys: list[bytes] @streamable @dataclasses.dataclass(frozen=True) class StoreProofsHashes(Streamable): store_id: bytes32 - proofs: List[HashOnlyProof] + proofs: list[HashOnlyProof] @streamable @@ -917,10 +917,10 @@ class VerifyProofResponse(Streamable): success: bool -def dl_verify_proof_internal(dl_proof: DLProof, puzzle_hash: bytes32) -> List[KeyValueHashes]: +def dl_verify_proof_internal(dl_proof: DLProof, puzzle_hash: bytes32) -> list[KeyValueHashes]: """Verify a proof of inclusion for a DL singleton""" - verified_keys: List[KeyValueHashes] = [] + verified_keys: list[KeyValueHashes] = [] for reference_proof in dl_proof.store_proofs.proofs: inner_puz_hash = dl_proof.inner_puzzle_hash @@ -962,10 +962,10 @@ def dl_verify_proof_internal(dl_proof: DLProof, puzzle_hash: bytes32) -> List[Ke async def dl_verify_proof( - request: Dict[str, Any], + request: dict[str, Any], wallet_node: WalletNode, peer: WSChiaConnection, -) -> Dict[str, Any]: +) -> dict[str, Any]: """Verify a proof of inclusion for a DL singleton""" dlproof = DLProof.from_json_dict(request) diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py index a6cd126f57d8..76195d2f4b8b 100644 --- a/chia/data_layer/data_layer_wallet.py +++ b/chia/data_layer/data_layer_wallet.py @@ -3,7 +3,7 @@ import dataclasses import logging import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import G1Element, G2Element from clvm.EvalError import EvalError @@ -88,11 +88,11 @@ class Mirror: coin_id: bytes32 launcher_id: bytes32 amount: uint64 - urls: List[bytes] + urls: list[bytes] ours: bool confirmed_at_height: Optional[uint32] - def to_json_dict(self) -> Dict[str, Any]: + def to_json_dict(self) -> dict[str, Any]: return { "coin_id": self.coin_id.hex(), "launcher_id": self.launcher_id.hex(), @@ -103,7 +103,7 @@ def to_json_dict(self) -> Dict[str, Any]: } @classmethod - def from_json_dict(cls, json_dict: Dict[str, Any]) -> Mirror: + def from_json_dict(cls, json_dict: dict[str, Any]) -> Mirror: return cls( bytes32.from_hexstr(json_dict["coin_id"]), bytes32.from_hexstr(json_dict["launcher_id"]), @@ -180,7 +180,7 @@ async def create_new_dl_wallet(cls, wallet_state_manager: WalletStateManager) -> ############# @staticmethod - async def match_dl_launcher(launcher_spend: CoinSpend) -> Tuple[bool, Optional[bytes32]]: + async def match_dl_launcher(launcher_spend: CoinSpend) -> tuple[bool, Optional[bytes32]]: # Sanity check it's a launcher if launcher_spend.puzzle_reveal.to_program() != SINGLETON_LAUNCHER: return False, None @@ -204,7 +204,7 @@ async def match_dl_launcher(launcher_spend: CoinSpend) -> Tuple[bool, Optional[b return True, inner_puzhash async def get_launcher_coin_state(self, launcher_id: bytes32, peer: WSChiaConnection) -> CoinState: - coin_states: List[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( + coin_states: list[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( [launcher_id], peer=peer ) @@ -302,13 +302,13 @@ async def generate_new_reporter( initial_root: bytes32, action_scope: WalletActionScope, fee: uint64 = uint64(0), - 
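
dl_verify_proof_internal above walks each HashOnlyProof layer by layer back up to a root and compares the result against on-chain state. The core step is ordinary Merkle-path folding; a sketch under an assumed plain-sha256 pair combiner (the real tree uses its own node hashing), with the (path_bits, sibling_hashes) proof shape that also appears later in finish_graftroot_solutions:

import hashlib


def pair_hash(left: bytes, right: bytes) -> bytes:
    return hashlib.sha256(left + right).digest()  # assumed combiner, for illustration


def fold_merkle_proof(leaf: bytes, proof: tuple[int, list[bytes]]) -> bytes:
    path_bits, siblings = proof
    node = leaf
    for i, sibling in enumerate(siblings):
        # Bit i of the path records which side the running node sits on at level i.
        if (path_bits >> i) & 1:
            node = pair_hash(sibling, node)
        else:
            node = pair_hash(node, sibling)
    return node  # caller compares this against the expected root

A proof verifies exactly when the folded value equals the committed root hash.
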
extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> bytes32: """ Creates the initial singleton, which includes spending an origin coin, the launcher, and creating a singleton """ - coins: Set[Coin] = await self.standard_wallet.select_coins(uint64(fee + 1), action_scope) + coins: set[Coin] = await self.standard_wallet.select_coins(uint64(fee + 1), action_scope) if coins is None: raise ValueError("Not enough coins to create new data layer singleton") @@ -393,7 +393,7 @@ async def create_update_state_spend( fee: uint64 = uint64(0), add_pending_singleton: bool = True, announce_new_state: bool = False, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: singleton_record, parent_lineage = await self.get_spendable_singleton_info(launcher_id) @@ -606,13 +606,13 @@ async def create_update_state_spend( async def generate_signed_transaction( self, - amounts: List[uint64], - puzzle_hashes: List[bytes32], + amounts: list[uint64], + puzzle_hashes: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Set[Coin] = set(), - memos: Optional[List[List[bytes]]] = None, # ignored - extra_conditions: Tuple[Condition, ...] = tuple(), + coins: set[Coin] = set(), + memos: Optional[list[list[bytes]]] = None, # ignored + extra_conditions: tuple[Condition, ...] = tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: launcher_id: Optional[bytes32] = kwargs.get("launcher_id", None) @@ -648,7 +648,7 @@ async def generate_signed_transaction( extra_conditions, ) - async def get_spendable_singleton_info(self, launcher_id: bytes32) -> Tuple[SingletonRecord, LineageProof]: + async def get_spendable_singleton_info(self, launcher_id: bytes32) -> tuple[SingletonRecord, LineageProof]: # First, let's make sure this is a singleton that we track and that we can spend singleton_record: Optional[SingletonRecord] = await self.get_latest_singleton(launcher_id) if singleton_record is None: @@ -681,7 +681,7 @@ async def get_spendable_singleton_info(self, launcher_id: bytes32) -> Tuple[Sing return singleton_record, parent_lineage - async def get_owned_singletons(self) -> List[SingletonRecord]: + async def get_owned_singletons(self) -> list[SingletonRecord]: launcher_ids = await self.wallet_state_manager.dl_store.get_all_launchers() collected = [] @@ -706,10 +706,10 @@ async def create_new_mirror( self, launcher_id: bytes32, amount: uint64, - urls: List[bytes], + urls: list[bytes], action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: await self.standard_wallet.generate_signed_transaction( amount=amount, @@ -727,7 +727,7 @@ async def delete_mirror( peer: WSChiaConnection, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
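
generate_new_reporter above must gather at least fee + 1 mojos before it can spend an origin coin into the launcher. As a rough illustration of what a select_coins-style helper does (a naive largest-first sketch, not the wallet's actual selection policy):

from dataclasses import dataclass


@dataclass(frozen=True)
class FakeCoin:  # stand-in for chia's Coin; only .amount matters here
    amount: int


def select_coins_greedy(coins: set[FakeCoin], target: int) -> set[FakeCoin]:
    chosen: set[FakeCoin] = set()
    total = 0
    # Largest-first keeps the number of inputs small.
    for coin in sorted(coins, key=lambda c: c.amount, reverse=True):
        if total >= target:
            break
        chosen.add(coin)
        total += coin.amount
    if total < target:
        raise ValueError("Not enough coins to reach the target amount")
    return chosen
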
= tuple(), ) -> None: mirror: Mirror = await self.get_mirror(mirror_id) mirror_coin: Coin = (await self.wallet_state_manager.wallet_node.get_coin_state([mirror.coin_id], peer=peer))[ @@ -918,8 +918,8 @@ async def get_history( min_generation: Optional[uint32] = None, max_generation: Optional[uint32] = None, num_results: Optional[uint32] = None, - ) -> List[SingletonRecord]: - history: List[SingletonRecord] = await self.wallet_state_manager.dl_store.get_all_singletons_for_launcher( + ) -> list[SingletonRecord]: + history: list[SingletonRecord] = await self.wallet_state_manager.dl_store.get_all_singletons_for_launcher( launcher_id, min_generation, max_generation, @@ -931,13 +931,13 @@ async def get_singleton_record(self, coin_id: bytes32) -> Optional[SingletonReco singleton: Optional[SingletonRecord] = await self.wallet_state_manager.dl_store.get_singleton_record(coin_id) return singleton - async def get_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> List[SingletonRecord]: - singletons: List[SingletonRecord] = await self.wallet_state_manager.dl_store.get_singletons_by_root( + async def get_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> list[SingletonRecord]: + singletons: list[SingletonRecord] = await self.wallet_state_manager.dl_store.get_singletons_by_root( launcher_id, root ) return singletons - async def get_mirrors_for_launcher(self, launcher_id: bytes32) -> List[Mirror]: + async def get_mirrors_for_launcher(self, launcher_id: bytes32) -> list[Mirror]: return await self.wallet_state_manager.dl_store.get_mirrors(launcher_id) async def get_mirror(self, coin_id: bytes32) -> Mirror: @@ -968,19 +968,19 @@ async def get_new_puzzlehash(self) -> bytes32: async def new_peak(self, peak: BlockRecord) -> None: pass - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) - async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) - async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) async def get_pending_change_balance(self) -> uint64: return uint64(0) - async def get_max_send_amount(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) def get_name(self) -> str: @@ -1007,7 +1007,7 @@ async def get_puzzle_info(self, launcher_id: bytes32) -> PuzzleInfo: } ) - async def get_coins_to_offer(self, launcher_id: bytes32, *args: Any, **kwargs: Any) -> Set[Coin]: + async def get_coins_to_offer(self, launcher_id: bytes32, *args: Any, **kwargs: Any) -> set[Coin]: record = await self.get_latest_singleton(launcher_id) if record is None: raise ValueError(f"DL wallet does not know about launcher ID {launcher_id}") @@ -1021,12 +1021,12 @@ async def get_coins_to_offer(self, launcher_id: bytes32, *args: Any, **kwargs: A @staticmethod async def make_update_offer( wallet_state_manager: Any, - offer_dict: Dict[Optional[bytes32], int], - driver_dict: Dict[bytes32, PuzzleInfo], + offer_dict: dict[Optional[bytes32], int], + 
driver_dict: dict[bytes32, PuzzleInfo], solver: Solver, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> Offer: dl_wallet = None for wallet in wallet_state_manager.wallets.values(): @@ -1036,9 +1036,9 @@ async def make_update_offer( if dl_wallet is None: raise ValueError("DL Wallet is not initialized") - offered_launchers: List[bytes32] = [k for k, v in offer_dict.items() if v < 0 and k is not None] + offered_launchers: list[bytes32] = [k for k, v in offer_dict.items() if v < 0 and k is not None] fee_left_to_pay: uint64 = fee - all_transactions: List[TransactionRecord] = [] + all_transactions: list[TransactionRecord] = [] for launcher in offered_launchers: try: this_solver: Solver = solver[launcher.hex()] @@ -1125,9 +1125,9 @@ async def make_update_offer( @staticmethod async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: # Build a mapping of launcher IDs to their new innerpuz - singleton_to_innerpuzhash: Dict[bytes32, bytes32] = {} - singleton_to_root: Dict[bytes32, bytes32] = {} - all_parent_ids: List[bytes32] = [cs.coin.parent_coin_info for cs in offer.coin_spends()] + singleton_to_innerpuzhash: dict[bytes32, bytes32] = {} + singleton_to_root: dict[bytes32, bytes32] = {} + all_parent_ids: list[bytes32] = [cs.coin.parent_coin_info for cs in offer.coin_spends()] for spend in offer.coin_spends(): matched, curried_args = match_dl_singleton(spend.puzzle_reveal) if matched and spend.coin.name() not in all_parent_ids: @@ -1137,7 +1137,7 @@ async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: singleton_to_innerpuzhash[singleton_struct] = innerpuz.get_tree_hash() # Create all of the new solutions - new_spends: List[CoinSpend] = [] + new_spends: list[CoinSpend] = [] for spend in offer.coin_spends(): solution = spend.solution.to_program() if match_dl_singleton(spend.puzzle_reveal)[0]: @@ -1157,7 +1157,7 @@ async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: for value in values: for proof_of_inclusion in solver["proofs_of_inclusion"]: root: str = proof_of_inclusion[0] - proof: Tuple[int, List[bytes32]] = (proof_of_inclusion[1], proof_of_inclusion[2]) + proof: tuple[int, list[bytes32]] = (proof_of_inclusion[1], proof_of_inclusion[2]) calculated_root: bytes32 = _simplify_merkle_proof(value, proof) if ( calculated_root == bytes32.from_hexstr(root) @@ -1194,8 +1194,8 @@ async def finish_graftroot_solutions(offer: Offer, solver: Solver) -> Offer: return Offer({}, WalletSpendBundle(new_spends, offer.aggregated_signature()), offer.driver_dict) @staticmethod - async def get_offer_summary(offer: Offer) -> Dict[str, Any]: - summary: Dict[str, Any] = {"offered": []} + async def get_offer_summary(offer: Offer) -> dict[str, Any]: + summary: dict[str, Any] = {"offered": []} for spend in offer.coin_spends(): solution = spend.solution.to_program() matched, curried_args = match_dl_singleton(spend.puzzle_reveal) @@ -1230,7 +1230,7 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: raise RuntimeError("DataLayerWallet does not support select_coins()") async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool: @@ -1238,9 +1238,9 @@ async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool: def verify_offer( - maker: Tuple[StoreProofs, ...], - taker: Tuple[OfferStore, ...], - summary: Dict[str, Any], + maker: tuple[StoreProofs, ...], + taker: 
tuple[OfferStore, ...], + summary: dict[str, Any], ) -> None: # TODO: consistency in error messages # TODO: custom exceptions @@ -1252,7 +1252,7 @@ def verify_offer( raise OfferIntegrityError("maker: repeated store id") for store_proof in maker: - proofs: List[ProofOfInclusion] = [] + proofs: list[ProofOfInclusion] = [] for reference_proof in store_proof.proofs: proof = ProofOfInclusion( node_hash=reference_proof.node_hash, diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index 9fa25e1ad539..2132728bebca 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -3,10 +3,11 @@ import contextlib import logging from collections import defaultdict +from collections.abc import AsyncIterator, Awaitable from contextlib import asynccontextmanager from dataclasses import dataclass, replace from pathlib import Path -from typing import Any, AsyncIterator, Awaitable, BinaryIO, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, BinaryIO, Callable, Optional, Union import aiosqlite @@ -503,7 +504,7 @@ async def _check_roots_are_incrementing(self) -> None: cursor = await reader.execute("SELECT * FROM root ORDER BY tree_id, generation") roots = [Root.from_row(row=row) async for row in cursor] - roots_by_tree: Dict[bytes32, List[Root]] = defaultdict(list) + roots_by_tree: dict[bytes32, list[Root]] = defaultdict(list) for root in roots: roots_by_tree[root.store_id].append(root) @@ -522,7 +523,7 @@ async def _check_hashes(self) -> None: async with self.db_wrapper.reader() as reader: cursor = await reader.execute("SELECT * FROM node") - bad_node_hashes: List[bytes32] = [] + bad_node_hashes: list[bytes32] = [] async for row in cursor: node = row_to_node(row=row) if isinstance(node, InternalNode): @@ -538,7 +539,7 @@ async def _check_hashes(self) -> None: if len(bad_node_hashes) > 0: raise NodeHashError(node_hashes=bad_node_hashes) - _checks: Tuple[Callable[[DataStore], Awaitable[None]], ...] = ( + _checks: tuple[Callable[[DataStore], Awaitable[None]], ...] 
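
The _checks tuple that closes the hunk above registers async invariant checks (roots increment, node hashes match contents) so a single entry point can run them in order. A self-contained sketch of the pattern with a hypothetical Store and two no-op checks:

import asyncio
from collections.abc import Awaitable
from typing import Callable


class Store:
    async def _check_a(self) -> None:
        pass  # e.g. roots are incrementing

    async def _check_b(self) -> None:
        pass  # e.g. node hashes match node contents

    # Plain functions in a tuple: no descriptor binding happens, so each
    # check is called with the instance passed explicitly.
    _checks: tuple[Callable[["Store"], Awaitable[None]], ...] = (_check_a, _check_b)

    async def check(self) -> None:
        for check in self._checks:
            await check(self)


asyncio.run(Store().check())
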
= ( _check_roots_are_incrementing, _check_hashes, ) @@ -553,7 +554,7 @@ async def table_is_empty(self, store_id: bytes32) -> bool: return tree_root.node_hash is None - async def get_store_ids(self) -> Set[bytes32]: + async def get_store_ids(self) -> set[bytes32]: async with self.db_wrapper.reader() as reader: cursor = await reader.execute("SELECT DISTINCT tree_id FROM root") @@ -597,7 +598,7 @@ async def get_tree_root(self, store_id: bytes32, generation: Optional[int] = Non return Root.from_row(row=row) - async def get_all_pending_batches_roots(self) -> List[Root]: + async def get_all_pending_batches_roots(self) -> list[Root]: async with self.db_wrapper.reader() as reader: cursor = await reader.execute( """ @@ -623,7 +624,7 @@ async def store_id_exists(self, store_id: bytes32) -> bool: return False return True - async def get_roots_between(self, store_id: bytes32, generation_begin: int, generation_end: int) -> List[Root]: + async def get_roots_between(self, store_id: bytes32, generation_begin: int, generation_end: int) -> list[Root]: async with self.db_wrapper.reader() as reader: cursor = await reader.execute( "SELECT * FROM root WHERE tree_id == :tree_id " @@ -658,7 +659,7 @@ async def get_ancestors( node_hash: bytes32, store_id: bytes32, root_hash: Optional[bytes32] = None, - ) -> List[InternalNode]: + ) -> list[InternalNode]: async with self.db_wrapper.reader() as reader: if root_hash is None: root = await self.get_tree_root(store_id=store_id) @@ -702,7 +703,7 @@ async def get_ancestors_optimized( store_id: bytes32, generation: Optional[int] = None, root_hash: Optional[bytes32] = None, - ) -> List[InternalNode]: + ) -> list[InternalNode]: async with self.db_wrapper.reader(): nodes = [] if root_hash is None: @@ -725,7 +726,7 @@ async def get_ancestors_optimized( return nodes - async def get_internal_nodes(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> List[InternalNode]: + async def get_internal_nodes(self, store_id: bytes32, root_hash: Optional[bytes32] = None) -> list[InternalNode]: async with self.db_wrapper.reader() as reader: if root_hash is None: root = await self.get_tree_root(store_id=store_id) @@ -745,7 +746,7 @@ async def get_internal_nodes(self, store_id: bytes32, root_hash: Optional[bytes3 {"root_hash": root_hash, "node_type": NodeType.INTERNAL}, ) - internal_nodes: List[InternalNode] = [] + internal_nodes: list[InternalNode] = [] async for row in cursor: node = row_to_node(row=row) if not isinstance(node, InternalNode): @@ -791,7 +792,7 @@ async def get_keys_values( self, store_id: bytes32, root_hash: Union[bytes32, Unspecified] = unspecified, - ) -> List[TerminalNode]: + ) -> list[TerminalNode]: async with self.db_wrapper.reader() as reader: resolved_root_hash: Optional[bytes32] if root_hash is unspecified: @@ -801,7 +802,7 @@ async def get_keys_values( resolved_root_hash = root_hash cursor = await self.get_keys_values_cursor(reader, resolved_root_hash) - terminal_nodes: List[TerminalNode] = [] + terminal_nodes: list[TerminalNode] = [] async for row in cursor: if row["depth"] > 62: # TODO: Review the value and implementation of left-to-right order @@ -836,9 +837,9 @@ async def get_keys_values_compressed( resolved_root_hash = root_hash cursor = await self.get_keys_values_cursor(reader, resolved_root_hash) - keys_values_hashed: Dict[bytes32, bytes32] = {} - key_hash_to_length: Dict[bytes32, int] = {} - leaf_hash_to_length: Dict[bytes32, int] = {} + keys_values_hashed: dict[bytes32, bytes32] = {} + key_hash_to_length: dict[bytes32, int] = {} + 
leaf_hash_to_length: dict[bytes32, int] = {} async for row in cursor: if row["depth"] > 62: raise Exception("Tree depth exceeded 62, unable to guarantee left-to-right node order.") @@ -853,8 +854,8 @@ async def get_keys_values_compressed( async def get_leaf_hashes_by_hashed_key( self, store_id: bytes32, root_hash: Optional[bytes32] = None - ) -> Dict[bytes32, bytes32]: - result: Dict[bytes32, bytes32] = {} + ) -> dict[bytes32, bytes32]: + result: dict[bytes32, bytes32] = {} async with self.db_wrapper.reader() as reader: if root_hash is None: root = await self.get_tree_root(store_id=store_id) @@ -876,7 +877,7 @@ async def get_keys_paginated( keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash) pagination_data = get_hashes_for_page(page, keys_values_compressed.key_hash_to_length, max_page_size) - keys: List[bytes] = [] + keys: list[bytes] = [] for hash in pagination_data.hashes: leaf_hash = keys_values_compressed.keys_values_hashed[hash] node = await self.get_node(leaf_hash) @@ -900,7 +901,7 @@ async def get_keys_values_paginated( keys_values_compressed = await self.get_keys_values_compressed(store_id, root_hash) pagination_data = get_hashes_for_page(page, keys_values_compressed.leaf_hash_to_length, max_page_size) - keys_values: List[TerminalNode] = [] + keys_values: list[TerminalNode] = [] for hash in pagination_data.hashes: node = await self.get_node(hash) assert isinstance(node, TerminalNode) @@ -941,7 +942,7 @@ async def get_kv_diff_paginated( lengths[hash] = old_pairs.leaf_hash_to_length[hash] pagination_data = get_hashes_for_page(page, lengths, max_page_size) - kv_diff: List[DiffData] = [] + kv_diff: list[DiffData] = [] for hash in pagination_data.hashes: node = await self.get_node(hash) @@ -1063,7 +1064,7 @@ async def get_keys_values_dict( self, store_id: bytes32, root_hash: Union[bytes32, Unspecified] = unspecified, - ) -> Dict[bytes, bytes]: + ) -> dict[bytes, bytes]: pairs = await self.get_keys_values(store_id=store_id, root_hash=root_hash) return {node.key: node.value for node in pairs} @@ -1071,7 +1072,7 @@ async def get_keys( self, store_id: bytes32, root_hash: Union[bytes32, Unspecified] = unspecified, - ) -> List[bytes]: + ) -> list[bytes]: async with self.db_wrapper.reader() as reader: if root_hash is unspecified: root = await self.get_tree_root(store_id=store_id) @@ -1094,7 +1095,7 @@ async def get_keys( {"root_hash": resolved_root_hash, "node_type": NodeType.TERMINAL}, ) - keys: List[bytes] = [row["key"] async for row in cursor] + keys: list[bytes] = [row["key"] async for row in cursor] return keys @@ -1105,9 +1106,9 @@ async def get_ancestors_common( root_hash: Optional[bytes32], generation: Optional[int] = None, use_optimized: bool = True, - ) -> List[InternalNode]: + ) -> list[InternalNode]: if use_optimized: - ancestors: List[InternalNode] = await self.get_ancestors_optimized( + ancestors: list[InternalNode] = await self.get_ancestors_optimized( node_hash=node_hash, store_id=store_id, generation=generation, @@ -1120,7 +1121,7 @@ async def get_ancestors_common( generation=generation, root_hash=root_hash, ) - ancestors_2: List[InternalNode] = await self.get_ancestors( + ancestors_2: list[InternalNode] = await self.get_ancestors( node_hash=node_hash, store_id=store_id, root_hash=root_hash ) if ancestors != ancestors_2: @@ -1136,12 +1137,12 @@ async def update_ancestor_hashes_on_insert( left: bytes32, right: bytes32, traversal_node_hash: bytes32, - ancestors: List[InternalNode], + ancestors: list[InternalNode], status: Status, root: Root, ) -> 
Root: # update ancestors after inserting root, to keep table constraints. - insert_ancestors_cache: List[Tuple[bytes32, bytes32, bytes32]] = [] + insert_ancestors_cache: list[tuple[bytes32, bytes32, bytes32]] = [] new_generation = root.generation + 1 # create first new internal node new_hash = await self._insert_internal_node(left_hash=left, right_hash=right) @@ -1273,7 +1274,7 @@ async def delete( log.debug(f"Request to delete an unknown key ignored: {key.hex()}") return root - ancestors: List[InternalNode] = await self.get_ancestors_common( + ancestors: list[InternalNode] = await self.get_ancestors_common( node_hash=node_hash, store_id=store_id, root_hash=root_hash, @@ -1306,7 +1307,7 @@ async def delete( else: new_generation = root.generation + 1 # update ancestors after inserting root, to keep table constraints. - insert_ancestors_cache: List[Tuple[bytes32, bytes32, bytes32]] = [] + insert_ancestors_cache: list[tuple[bytes32, bytes32, bytes32]] = [] # more parents to handle so let's traverse them for ancestor in ancestors[1:]: if ancestor.left_hash == old_child_hash: @@ -1433,7 +1434,7 @@ async def clean_node_table(self, writer: Optional[aiosqlite.Connection] = None) else: await writer.execute(query, params) - async def get_nodes(self, node_hashes: List[bytes32]) -> List[Node]: + async def get_nodes(self, node_hashes: list[bytes32]) -> list[Node]: query_parameter_place_holders = ",".join("?" for _ in node_hashes) async with self.db_wrapper.reader() as reader: # TODO: handle SQLITE_MAX_VARIABLE_NUMBER @@ -1452,9 +1453,9 @@ async def get_nodes(self, node_hashes: List[bytes32]) -> List[Node]: return [hash_to_node[node_hash] for node_hash in node_hashes] async def get_leaf_at_minimum_height( - self, root_hash: bytes32, hash_to_parent: Dict[bytes32, InternalNode] + self, root_hash: bytes32, hash_to_parent: dict[bytes32, InternalNode] ) -> TerminalNode: - queue: List[bytes32] = [root_hash] + queue: list[bytes32] = [root_hash] batch_size = min(500, SQLITE_MAX_VARIABLE_NUMBER - 10) while True: @@ -1473,8 +1474,8 @@ async def get_leaf_at_minimum_height( async def batch_upsert( self, hash: bytes32, - to_update_hashes: Set[bytes32], - pending_upsert_new_hashes: Dict[bytes32, bytes32], + to_update_hashes: set[bytes32], + pending_upsert_new_hashes: dict[bytes32, bytes32], ) -> bytes32: if hash not in to_update_hashes: return hash @@ -1488,7 +1489,7 @@ async def batch_upsert( async def insert_batch( self, store_id: bytes32, - changelist: List[Dict[str, Any]], + changelist: list[dict[str, Any]], status: Status = Status.PENDING, enable_batch_autoinsert: bool = True, ) -> Optional[bytes32]: @@ -1510,9 +1511,9 @@ async def insert_batch( assert latest_local_root is not None - key_hash_frequency: Dict[bytes32, int] = {} - first_action: Dict[bytes32, str] = {} - last_action: Dict[bytes32, str] = {} + key_hash_frequency: dict[bytes32, int] = {} + first_action: dict[bytes32, str] = {} + last_action: dict[bytes32, str] = {} for change in changelist: key = change["key"] @@ -1522,8 +1523,8 @@ async def insert_batch( first_action[hash] = change["action"] last_action[hash] = change["action"] - pending_autoinsert_hashes: List[bytes32] = [] - pending_upsert_new_hashes: Dict[bytes32, bytes32] = {} + pending_autoinsert_hashes: list[bytes32] = [] + pending_upsert_new_hashes: dict[bytes32, bytes32] = {} leaf_hashes = await self.get_leaf_hashes_by_hashed_key(store_id) for change in changelist: @@ -1598,8 +1599,8 @@ async def insert_batch( raise Exception(f"Operation in batch is not insert or delete: {change}") if 
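
update_ancestor_hashes_on_insert above re-hashes only the ancestors on the path from the changed node to the root, producing one new internal node per level while everything off the path is shared with the previous generation. The bottom-up recomputation, sketched with an assumed pair combiner:

import hashlib


def internal_hash(left: bytes, right: bytes) -> bytes:
    return hashlib.sha256(b"\x02" + left + right).digest()  # assumed combiner


def rehash_path(ancestors: list[tuple[bytes, bytes]], old_hash: bytes, new_hash: bytes) -> bytes:
    """ancestors holds each level's (left_hash, right_hash), leaf side first."""
    for left, right in ancestors:
        parent_old = internal_hash(left, right)
        # Swap the replacement in on whichever side held the old child.
        if left == old_hash:
            parent_new = internal_hash(new_hash, right)
        else:
            parent_new = internal_hash(left, new_hash)
        old_hash, new_hash = parent_old, parent_new
    return new_hash  # the new root hash
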
len(pending_upsert_new_hashes) > 0: - to_update_hashes: Set[bytes32] = set(pending_upsert_new_hashes.keys()) - to_update_queue: List[bytes32] = list(pending_upsert_new_hashes.keys()) + to_update_hashes: set[bytes32] = set(pending_upsert_new_hashes.keys()) + to_update_queue: list[bytes32] = list(pending_upsert_new_hashes.keys()) batch_size = min(500, SQLITE_MAX_VARIABLE_NUMBER - 10) while len(to_update_queue) > 0: @@ -1622,7 +1623,7 @@ async def insert_batch( # Start with the leaf nodes and pair them to form new nodes at the next level up, repeating this process # in a bottom-up fashion until a single root node remains. This constructs a balanced tree from the leaves. while len(pending_autoinsert_hashes) > 1: - new_hashes: List[bytes32] = [] + new_hashes: list[bytes32] = [] for i in range(0, len(pending_autoinsert_hashes) - 1, 2): internal_node_hash = await self._insert_internal_node( pending_autoinsert_hashes[i], pending_autoinsert_hashes[i + 1] @@ -1638,9 +1639,9 @@ async def insert_batch( if latest_local_root is None or latest_local_root.node_hash is None: await self._insert_root(store_id=store_id, node_hash=subtree_hash, status=Status.COMMITTED) else: - hash_to_parent: Dict[bytes32, InternalNode] = {} + hash_to_parent: dict[bytes32, InternalNode] = {} min_height_leaf = await self.get_leaf_at_minimum_height(latest_local_root.node_hash, hash_to_parent) - ancestors: List[InternalNode] = [] + ancestors: list[InternalNode] = [] hash = min_height_leaf.hash while hash in hash_to_parent: node = hash_to_parent[hash] @@ -1712,10 +1713,10 @@ async def _get_one_ancestor( async def _get_one_ancestor_multiple_hashes( self, - node_hashes: List[bytes32], + node_hashes: list[bytes32], store_id: bytes32, generation: Optional[int] = None, - ) -> List[InternalNode]: + ) -> list[InternalNode]: async with self.db_wrapper.reader() as reader: node_hashes_place_holders = ",".join("?" 
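
The comment in the hunk above describes the auto-insert shape exactly: pair the pending leaf hashes level by level, bottom-up, until one root remains. As a standalone sketch (assumed combiner; an odd node is carried up unchanged here, which may differ in detail from the store's handling):

import hashlib


def pair_hash(left: bytes, right: bytes) -> bytes:
    return hashlib.sha256(left + right).digest()  # assumed combiner


def build_balanced_root(leaves: list[bytes]) -> bytes:
    assert leaves, "need at least one leaf"
    level = list(leaves)
    while len(level) > 1:
        nxt = [pair_hash(level[i], level[i + 1]) for i in range(0, len(level) - 1, 2)]
        if len(level) % 2:
            nxt.append(level[-1])  # odd node rides up to the next level
        level = nxt
    return level[0]

The result is a balanced subtree of depth ceil(log2(n)), which is the point of batching the auto-inserts instead of performing n sequential inserts.
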
for _ in node_hashes) if generation is None: @@ -1747,14 +1748,14 @@ async def build_ancestor_table_for_latest_root(self, store_id: bytes32) -> None: ) if previous_root.node_hash is not None: - previous_internal_nodes: List[InternalNode] = await self.get_internal_nodes( + previous_internal_nodes: list[InternalNode] = await self.get_internal_nodes( store_id=store_id, root_hash=previous_root.node_hash, ) - known_hashes: Set[bytes32] = {node.hash for node in previous_internal_nodes} + known_hashes: set[bytes32] = {node.hash for node in previous_internal_nodes} else: known_hashes = set() - internal_nodes: List[InternalNode] = await self.get_internal_nodes( + internal_nodes: list[InternalNode] = await self.get_internal_nodes( store_id=store_id, root_hash=root.node_hash, ) @@ -1809,7 +1810,7 @@ async def get_node_by_key_latest_generation(self, key: bytes, store_id: bytes32) return node async def maybe_get_node_from_key_hash( - self, leaf_hashes: Dict[bytes32, bytes32], hash: bytes32 + self, leaf_hashes: dict[bytes32, bytes32], hash: bytes32 ) -> Optional[TerminalNode]: if hash in leaf_hashes: leaf_hash = leaf_hashes[hash] @@ -1875,7 +1876,7 @@ async def get_tree_as_nodes(self, store_id: bytes32) -> Node: {"root_hash": root_node.hash}, ) nodes = [row_to_node(row=row) async for row in cursor] - hash_to_node: Dict[bytes32, Node] = {} + hash_to_node: dict[bytes32, Node] = {} for node in reversed(nodes): if isinstance(node, InternalNode): node = replace(node, left=hash_to_node[node.left_hash], right=hash_to_node[node.right_hash]) @@ -1904,7 +1905,7 @@ async def get_proof_of_inclusion_by_hash( else: ancestors = await self.get_ancestors(node_hash=node_hash, store_id=store_id, root_hash=root_hash) - layers: List[ProofOfInclusionLayer] = [] + layers: list[ProofOfInclusionLayer] = [] child_hash = node_hash for parent in ancestors: layer = ProofOfInclusionLayer.from_internal_node(internal_node=parent, traversal_child_hash=child_hash) @@ -1981,7 +1982,7 @@ async def write_tree_to_file( writer.write(len(to_write).to_bytes(4, byteorder="big")) writer.write(to_write) - async def update_subscriptions_from_wallet(self, store_id: bytes32, new_urls: List[str]) -> None: + async def update_subscriptions_from_wallet(self, store_id: bytes32, new_urls: list[str]) -> None: async with self.db_wrapper.writer() as writer: cursor = await writer.execute( "SELECT * FROM subscriptions WHERE from_wallet == 1 AND tree_id == :tree_id", @@ -2053,7 +2054,7 @@ async def subscribe(self, subscription: Subscription) -> None: }, ) - async def remove_subscriptions(self, store_id: bytes32, urls: List[str]) -> None: + async def remove_subscriptions(self, store_id: bytes32, urls: list[str]) -> None: async with self.db_wrapper.writer() as writer: for url in urls: await writer.execute( @@ -2099,8 +2100,8 @@ async def delete_store_data(self, store_id: bytes32) -> None: "pending_batch_status": Status.PENDING_BATCH.value, }, ) - to_delete: Dict[bytes, Tuple[bytes, bytes]] = {} - ref_counts: Dict[bytes, int] = {} + to_delete: dict[bytes, tuple[bytes, bytes]] = {} + ref_counts: dict[bytes, int] = {} async for row in cursor: hash = row["hash"] left = row["left"] @@ -2195,7 +2196,7 @@ async def server_misses_file(self, store_id: bytes32, server_info: ServerInfo, t await self.update_server_info(store_id, new_server_info) return new_server_info - async def get_available_servers_for_store(self, store_id: bytes32, timestamp: int) -> List[ServerInfo]: + async def get_available_servers_for_store(self, store_id: bytes32, timestamp: int) -> 
list[ServerInfo]: subscriptions = await self.get_subscriptions() subscription = next((subscription for subscription in subscriptions if subscription.store_id == store_id), None) if subscription is None: @@ -2206,8 +2207,8 @@ async def get_available_servers_for_store(self, store_id: bytes32, timestamp: in servers_info.append(server_info) return servers_info - async def get_subscriptions(self) -> List[Subscription]: - subscriptions: List[Subscription] = [] + async def get_subscriptions(self) -> list[Subscription]: + subscriptions: list[Subscription] = [] async with self.db_wrapper.reader() as reader: cursor = await reader.execute( @@ -2244,7 +2245,7 @@ async def get_kv_diff( # NOTE: empty is expressed as zeros hash_1: bytes32, hash_2: bytes32, - ) -> Set[DiffData]: + ) -> set[DiffData]: async with self.db_wrapper.reader(): old_pairs = set(await self.get_keys_values(store_id, hash_1)) if len(old_pairs) == 0 and hash_1 != bytes32([0] * 32): diff --git a/chia/data_layer/dl_wallet_store.py b/chia/data_layer/dl_wallet_store.py index 7d136ee3f256..a0b3ad2cd45d 100644 --- a/chia/data_layer/dl_wallet_store.py +++ b/chia/data_layer/dl_wallet_store.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import List, Optional, Type, TypeVar, Union +from typing import Optional, TypeVar, Union from aiosqlite import Row @@ -30,7 +30,7 @@ def _row_to_singleton_record(row: Row) -> SingletonRecord: def _row_to_mirror(row: Row, confirmed_at_height: Optional[uint32]) -> Mirror: - urls: List[bytes] = [] + urls: list[bytes] = [] byte_list: bytes = row[3] while byte_list != b"": length = uint16.from_bytes(byte_list[0:2]) @@ -48,7 +48,7 @@ class DataLayerStore: db_wrapper: DBWrapper2 @classmethod - async def create(cls: Type[_T_DataLayerStore], db_wrapper: DBWrapper2) -> _T_DataLayerStore: + async def create(cls: type[_T_DataLayerStore], db_wrapper: DBWrapper2) -> _T_DataLayerStore: self = cls() self.db_wrapper = db_wrapper @@ -131,11 +131,11 @@ async def get_all_singletons_for_launcher( min_generation: Optional[uint32] = None, max_generation: Optional[uint32] = None, num_results: Optional[uint32] = None, - ) -> List[SingletonRecord]: + ) -> list[SingletonRecord]: """ Returns stored singletons with a specific launcher ID. """ - query_params: List[Union[bytes32, uint32]] = [launcher_id] + query_params: list[Union[bytes32, uint32]] = [launcher_id] for optional_param in (min_generation, max_generation, num_results): if optional_param is not None: query_params.append(optional_param) @@ -200,7 +200,7 @@ async def get_latest_singleton( return _row_to_singleton_record(row) return None - async def get_unconfirmed_singletons(self, launcher_id: bytes32) -> List[SingletonRecord]: + async def get_unconfirmed_singletons(self, launcher_id: bytes32) -> list[SingletonRecord]: """ Returns all singletons with a specific launcher id that have not yet been marked confirmed """ @@ -214,7 +214,7 @@ async def get_unconfirmed_singletons(self, launcher_id: bytes32) -> List[Singlet return records - async def get_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> List[SingletonRecord]: + async def get_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> list[SingletonRecord]: async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute( "SELECT * from singleton_records WHERE launcher_id=? AND root=? 
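
get_kv_diff above reduces to set arithmetic over (key, value) pairs drawn from two roots; kv_diff below is that reduction in isolation, not the method itself:

def kv_diff(
    old_pairs: set[tuple[bytes, bytes]], new_pairs: set[tuple[bytes, bytes]]
) -> tuple[set[tuple[bytes, bytes]], set[tuple[bytes, bytes]]]:
    insertions = new_pairs - old_pairs  # present only under the new root
    deletions = old_pairs - new_pairs  # present only under the old root
    return insertions, deletions

A changed value shows up as one deletion plus one insertion for the same key.
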
ORDER BY generation DESC", @@ -276,7 +276,7 @@ async def get_launcher(self, launcher_id: bytes32) -> Optional[Coin]: return Coin(bytes32(row[1][0:32]), bytes32(row[1][32:64]), uint64(int.from_bytes(row[1][64:72], "big"))) return None - async def get_all_launchers(self) -> List[bytes32]: + async def get_all_launchers(self) -> list[bytes32]: """ Checks DB for all launchers. """ @@ -329,7 +329,7 @@ async def add_mirror(self, mirror: Mirror) -> None: ), ) - async def get_mirrors(self, launcher_id: bytes32) -> List[Mirror]: + async def get_mirrors(self, launcher_id: bytes32) -> list[Mirror]: async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute( "SELECT * from mirrors WHERE launcher_id=?", @@ -337,7 +337,7 @@ async def get_mirrors(self, launcher_id: bytes32) -> List[Mirror]: ) rows = await cursor.fetchall() await cursor.close() - mirrors: List[Mirror] = [] + mirrors: list[Mirror] = [] for row in rows: confirmation_height = await execute_fetchone( diff --git a/chia/data_layer/download_data.py b/chia/data_layer/download_data.py index 331e2cb50b1f..8c3a5e8d152c 100644 --- a/chia/data_layer/download_data.py +++ b/chia/data_layer/download_data.py @@ -5,7 +5,7 @@ import time from dataclasses import dataclass from pathlib import Path -from typing import List, Optional +from typing import Optional import aiohttp from typing_extensions import Literal @@ -237,7 +237,7 @@ async def insert_from_delta_file( store_id: bytes32, existing_generation: int, target_generation: int, - root_hashes: List[bytes32], + root_hashes: list[bytes32], server_info: ServerInfo, client_foldername: Path, timeout: aiohttp.ClientTimeout, diff --git a/chia/data_layer/s3_plugin_service.py b/chia/data_layer/s3_plugin_service.py index cfeb9039a290..a9489e8f1fa2 100644 --- a/chia/data_layer/s3_plugin_service.py +++ b/chia/data_layer/s3_plugin_service.py @@ -11,7 +11,7 @@ import tempfile from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict, List, Optional, Set, overload +from typing import Any, Optional, overload from urllib.parse import urlparse import boto3 as boto3 @@ -31,17 +31,17 @@ class StoreConfig: id: bytes32 bucket: Optional[str] - urls: Set[str] + urls: set[str] @classmethod - def unmarshal(cls, d: Dict[str, Any]) -> StoreConfig: + def unmarshal(cls, d: dict[str, Any]) -> StoreConfig: upload_bucket = d.get("upload_bucket", None) if upload_bucket and len(upload_bucket) == 0: upload_bucket = None return StoreConfig(bytes32.from_hexstr(d["store_id"]), upload_bucket, d.get("download_urls", set())) - def marshal(self) -> Dict[str, Any]: + def marshal(self) -> dict[str, Any]: return {"store_id": self.id.hex(), "upload_bucket": self.bucket, "download_urls": self.urls} @@ -52,7 +52,7 @@ class S3Plugin: aws_access_key_id: str aws_secret_access_key: str server_files_path: Path - stores: List[StoreConfig] + stores: list[StoreConfig] instance_name: str def __init__( @@ -61,7 +61,7 @@ def __init__( aws_access_key_id: str, aws_secret_access_key: str, server_files_path: Path, - stores: List[StoreConfig], + stores: list[StoreConfig], instance_name: str, ): self.boto_resource = boto3.resource( @@ -373,7 +373,7 @@ def save_config(self, filename: str, config_data: Any) -> None: shutil.move(str(tmp_path), str(path)) -def read_store_ids_from_config(config: Dict[str, Any]) -> List[StoreConfig]: +def read_store_ids_from_config(config: dict[str, Any]) -> list[StoreConfig]: stores = [] for store in config.get("stores", []): try: @@ -389,7 +389,7 @@ def 
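
_row_to_mirror above decodes the mirror url column with a tiny wire format: each url is a 2-byte big-endian length followed by that many bytes. The read side below matches the hunk; the write side is the sketch's assumption about what add_mirror stores:

def encode_urls(urls: list[bytes]) -> bytes:
    return b"".join(len(url).to_bytes(2, "big") + url for url in urls)


def decode_urls(blob: bytes) -> list[bytes]:
    urls: list[bytes] = []
    while blob:
        length = int.from_bytes(blob[:2], "big")
        urls.append(blob[2 : 2 + length])
        blob = blob[2 + length :]
    return urls


assert decode_urls(encode_urls([b"http://a", b"http://b"])) == [b"http://a", b"http://b"]
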
read_store_ids_from_config(config: Dict[str, Any]) -> List[StoreConfig]: return stores -def make_app(config: Dict[str, Any], instance_name: str) -> web.Application: +def make_app(config: dict[str, Any], instance_name: str) -> web.Application: try: region = config["aws_credentials"]["region"] aws_access_key_id = config["aws_credentials"]["access_key_id"] diff --git a/chia/data_layer/util/plugin.py b/chia/data_layer/util/plugin.py index 6ea26376fc6b..7f5aa8e7e568 100644 --- a/chia/data_layer/util/plugin.py +++ b/chia/data_layer/util/plugin.py @@ -2,7 +2,6 @@ import logging from pathlib import Path -from typing import List import yaml @@ -10,7 +9,7 @@ from chia.util.log_exceptions import log_exceptions -async def load_plugin_configurations(root_path: Path, config_type: str, log: logging.Logger) -> List[PluginRemote]: +async def load_plugin_configurations(root_path: Path, config_type: str, log: logging.Logger) -> list[PluginRemote]: """ Loads plugin configurations from the specified directory and validates that the contents are in the expected JSON format (an array of PluginRemote objects). It gracefully handles errors diff --git a/chia/farmer/farmer.py b/chia/farmer/farmer.py index 55c49f4d80ae..98cdbae588c2 100644 --- a/chia/farmer/farmer.py +++ b/chia/farmer/farmer.py @@ -7,10 +7,11 @@ import sys import time import traceback +from collections.abc import AsyncIterator from dataclasses import dataclass from math import floor from pathlib import Path -from typing import TYPE_CHECKING, Any, AsyncIterator, ClassVar, Dict, List, Optional, Set, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast import aiohttp from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey @@ -69,11 +70,11 @@ @dataclass(frozen=True) class GetPoolInfoResult: - pool_info: Dict[str, Any] + pool_info: dict[str, Any] new_pool_url: Optional[str] -def strip_old_entries(pairs: List[Tuple[float, Any]], before: float) -> List[Tuple[float, Any]]: +def strip_old_entries(pairs: list[tuple[float, Any]], before: float) -> list[tuple[float, Any]]: for index, [timestamp, points] in enumerate(pairs): if timestamp >= before: if index == 0: @@ -84,12 +85,12 @@ def strip_old_entries(pairs: List[Tuple[float, Any]], before: float) -> List[Tup def increment_pool_stats( - pool_states: Dict[bytes32, Any], + pool_states: dict[bytes32, Any], p2_singleton_puzzlehash: bytes32, name: str, current_time: float, count: int = 1, - value: Optional[Union[int, Dict[str, Any]]] = None, + value: Optional[Union[int, dict[str, Any]]] = None, ) -> None: if p2_singleton_puzzlehash not in pool_states: return @@ -124,8 +125,8 @@ class Farmer: def __init__( self, root_path: Path, - farmer_config: Dict[str, Any], - pool_config: Dict[str, Any], + farmer_config: dict[str, Any], + pool_config: dict[str, Any], consensus_constants: ConsensusConstants, local_keychain: Optional[Keychain] = None, ): @@ -135,22 +136,22 @@ def __init__( self.config = farmer_config self.pool_config = pool_config # Keep track of all sps, keyed on challenge chain signage point hash - self.sps: Dict[bytes32, List[farmer_protocol.NewSignagePoint]] = {} + self.sps: dict[bytes32, list[farmer_protocol.NewSignagePoint]] = {} # Keep track of harvester plot identifier (str), target sp index, and PoSpace for each challenge - self.proofs_of_space: Dict[bytes32, List[Tuple[str, ProofOfSpace]]] = {} + self.proofs_of_space: dict[bytes32, list[tuple[str, ProofOfSpace]]] = {} # Quality string to plot identifier and challenge_hash, for use with 
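
read_store_ids_from_config above and load_plugin_configurations in the next file share a defensive-parsing stance: one malformed entry is logged and skipped rather than failing startup. The shape of that pattern, with hypothetical field names:

import logging
from typing import Any

log = logging.getLogger(__name__)


def parse_store_entries(config: dict[str, Any]) -> list[bytes]:
    stores: list[bytes] = []
    for entry in config.get("stores", []):
        try:
            stores.append(bytes.fromhex(entry["store_id"]))
        except (KeyError, TypeError, ValueError):
            # One bad entry should not take the whole service down.
            log.exception("skipping malformed store entry: %r", entry)
    return stores
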
harvester.RequestSignatures - self.quality_str_to_identifiers: Dict[bytes32, Tuple[str, bytes32, bytes32, bytes32]] = {} + self.quality_str_to_identifiers: dict[bytes32, tuple[str, bytes32, bytes32, bytes32]] = {} # number of responses to each signage point - self.number_of_responses: Dict[bytes32, int] = {} + self.number_of_responses: dict[bytes32, int] = {} # A dictionary of keys to time added. These keys refer to keys in the above 4 dictionaries. This is used # to periodically clear the memory - self.cache_add_time: Dict[bytes32, uint64] = {} + self.cache_add_time: dict[bytes32, uint64] = {} - self.plot_sync_receivers: Dict[bytes32, Receiver] = {} + self.plot_sync_receivers: dict[bytes32, Receiver] = {} self.cache_clear_task: Optional[asyncio.Task[None]] = None self.update_pool_state_task: Optional[asyncio.Task[None]] = None @@ -166,18 +167,18 @@ def __init__( self.harvester_handshake_task: Optional[asyncio.Task[None]] = None # From p2_singleton_puzzle_hash to pool state dict - self.pool_state: Dict[bytes32, Dict[str, Any]] = {} + self.pool_state: dict[bytes32, dict[str, Any]] = {} # From p2_singleton to auth PrivateKey - self.authentication_keys: Dict[bytes32, PrivateKey] = {} + self.authentication_keys: dict[bytes32, PrivateKey] = {} # Last time we updated pool_state based on the config file self.last_config_access_time: float = 0 - self.all_root_sks: List[PrivateKey] = [] + self.all_root_sks: list[PrivateKey] = [] # Use to find missing signage points. (new_signage_point, time) - self.prev_signage_point: Optional[Tuple[uint64, farmer_protocol.NewSignagePoint]] = None + self.prev_signage_point: Optional[tuple[uint64, farmer_protocol.NewSignagePoint]] = None @contextlib.asynccontextmanager async def manage(self) -> AsyncIterator[None]: @@ -216,7 +217,7 @@ async def start_task() -> None: await asyncio.sleep(0.5) # https://docs.aiohttp.org/en/stable/client_advanced.html#graceful-shutdown self.started = False - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) async def ensure_keychain_proxy(self) -> KeychainProxy: @@ -229,7 +230,7 @@ async def ensure_keychain_proxy(self) -> KeychainProxy: raise KeychainProxyConnectionFailure() return self.keychain_proxy - async def get_all_private_keys(self) -> List[Tuple[PrivateKey, bytes]]: + async def get_all_private_keys(self) -> list[tuple[PrivateKey, bytes]]: keychain_proxy = await self.ensure_keychain_proxy() return await keychain_proxy.get_all_private_keys() @@ -314,7 +315,7 @@ async def handshake_task() -> None: def set_server(self, server: ChiaServer) -> None: self.server = server - def state_changed(self, change: str, data: Dict[str, Any]) -> None: + def state_changed(self, change: str, data: dict[str, Any]) -> None: if self.state_changed_callback is not None: self.state_changed_callback(change, data) @@ -348,7 +349,7 @@ async def _pool_get_pool_info(self, pool_config: PoolWalletConfig) -> Optional[G url = f"{pool_config.pool_url}/pool_info" async with session.get(url, ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log)) as resp: if resp.ok: - response: Dict[str, Any] = json.loads(await resp.text()) + response: dict[str, Any] = json.loads(await resp.text()) self.log.info(f"GET /pool_info response: {response}") new_pool_url: Optional[str] = None response_url_str = f"{resp.url}" @@ -375,7 +376,7 @@ async def 
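
The cache_add_time dict introduced above carries the farmer's memory management: the signage-point-keyed structures share its keys, and a periodic task (visible further down this patch) drops any key older than a multiple of the sub-slot target from all of them at once. A sketch of that sweep:

import time


def evict_stale(
    cache_add_time: dict[bytes, float], *caches: dict[bytes, object], max_age: float
) -> None:
    now = time.time()
    removed = [key for key, added in cache_add_time.items() if now - added > max_age]
    for key in removed:
        for cache in caches:
            cache.pop(key, None)  # a key absent from some caches is fine
        cache_add_time.pop(key, None)
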
_pool_get_pool_info(self, pool_config: PoolWalletConfig) -> Optional[G async def _pool_get_farmer( self, pool_config: PoolWalletConfig, authentication_token_timeout: uint8, authentication_sk: PrivateKey - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: authentication_token = get_current_authentication_token(authentication_token_timeout) message: bytes32 = std_hash( AuthenticationPayload( @@ -396,7 +397,7 @@ async def _pool_get_farmer( ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log), ) as resp: if resp.ok: - response: Dict[str, Any] = json.loads(await resp.text()) + response: dict[str, Any] = json.loads(await resp.text()) log_level = logging.INFO if "error_code" in response: log_level = logging.WARNING @@ -422,7 +423,7 @@ async def _pool_get_farmer( async def _pool_post_farmer( self, pool_config: PoolWalletConfig, authentication_token_timeout: uint8, owner_sk: PrivateKey - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: auth_sk: Optional[PrivateKey] = self.get_authentication_sk(pool_config) assert auth_sk is not None post_farmer_payload: PostFarmerPayload = PostFarmerPayload( @@ -444,7 +445,7 @@ async def _pool_post_farmer( ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log), ) as resp: if resp.ok: - response: Dict[str, Any] = json.loads(await resp.text()) + response: dict[str, Any] = json.loads(await resp.text()) log_level = logging.INFO if "error_code" in response: log_level = logging.WARNING @@ -492,7 +493,7 @@ async def _pool_put_farmer( ssl=ssl_context_for_root(get_mozilla_ca_crt(), log=self.log), ) as resp: if resp.ok: - response: Dict[str, Any] = json.loads(await resp.text()) + response: dict[str, Any] = json.loads(await resp.text()) log_level = logging.INFO if "error_code" in response: log_level = logging.WARNING @@ -525,7 +526,7 @@ def get_authentication_sk(self, pool_config: PoolWalletConfig) -> Optional[Priva async def update_pool_state(self) -> None: config = load_config(self._root_path, "config.yaml") - pool_config_list: List[PoolWalletConfig] = load_pool_config(self._root_path) + pool_config_list: list[PoolWalletConfig] = load_pool_config(self._root_path) for pool_config in pool_config_list: p2_singleton_puzzle_hash = pool_config.p2_singleton_puzzle_hash @@ -598,7 +599,7 @@ async def update_pool_state(self) -> None: pool_state["next_farmer_update"] = time.time() + UPDATE_POOL_FARMER_INFO_INTERVAL authentication_token_timeout = pool_state["authentication_token_timeout"] - async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Optional[PoolErrorCode]]: + async def update_pool_farmer_info() -> tuple[Optional[GetFarmerResponse], Optional[PoolErrorCode]]: # Run a GET /farmer to see if the farmer is already known by the pool response = await self._pool_get_farmer( pool_config, authentication_token_timeout, authentication_sk @@ -662,19 +663,19 @@ async def update_pool_farmer_info() -> Tuple[Optional[GetFarmerResponse], Option tb = traceback.format_exc() self.log.error(f"Exception in update_pool_state for {pool_config.pool_url}, {e} {tb}") - def get_public_keys(self) -> List[G1Element]: + def get_public_keys(self) -> list[G1Element]: return [child_sk.get_g1() for child_sk in self._private_keys] - def get_private_keys(self) -> List[PrivateKey]: + def get_private_keys(self) -> list[PrivateKey]: return self._private_keys - async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict[str, Any]: + async def get_reward_targets(self, search_for_private_key: bool, 
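
_pool_get_farmer, _pool_post_farmer and _pool_put_farmer above all parse the pool's reply the same way: errors arrive in-band as JSON carrying an error_code field and are logged at a higher level rather than raised. Distilled:

import json
import logging
from typing import Any

log = logging.getLogger(__name__)


def handle_pool_response(text: str) -> dict[str, Any]:
    response: dict[str, Any] = json.loads(text)
    # In-band error signalling: the presence of "error_code" bumps the log level.
    level = logging.WARNING if "error_code" in response else logging.INFO
    log.log(level, "pool response: %s", response)
    return response
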
max_ph_to_search: int = 500) -> dict[str, Any]: if search_for_private_key: all_sks = await self.get_all_private_keys() have_farmer_sk, have_pool_sk = False, False - search_addresses: List[bytes32] = [self.farmer_target, self.pool_target] + search_addresses: list[bytes32] = [self.farmer_target, self.pool_target] for sk, _ in all_sks: - found_addresses: Set[bytes32] = match_address_to_sk(sk, search_addresses, max_ph_to_search) + found_addresses: set[bytes32] = match_address_to_sk(sk, search_addresses, max_ph_to_search) if not have_farmer_sk and self.farmer_target in found_addresses: search_addresses.remove(self.farmer_target) @@ -763,8 +764,8 @@ async def generate_login_link(self, launcher_id: bytes32) -> Optional[str]: return None - async def get_harvesters(self, counts_only: bool = False) -> Dict[str, Any]: - harvesters: List[Dict[str, Any]] = [] + async def get_harvesters(self, counts_only: bool = False) -> dict[str, Any]: + harvesters: list[dict[str, Any]] = [] for connection in self.server.get_connections(NodeType.HARVESTER): self.log.debug(f"get_harvesters host: {connection.peer_info.host}, node_id: {connection.peer_node_id}") receiver = self.plot_sync_receivers.get(connection.peer_node_id) @@ -785,7 +786,7 @@ def get_receiver(self, node_id: bytes32) -> Receiver: def check_missing_signage_points( self, timestamp: uint64, new_signage_point: farmer_protocol.NewSignagePoint - ) -> Optional[Tuple[uint64, uint32]]: + ) -> Optional[tuple[uint64, uint32]]: if self.prev_signage_point is None: self.prev_signage_point = (timestamp, new_signage_point) return None @@ -836,7 +837,7 @@ async def _periodically_clear_cache_and_refresh_task(self) -> None: try: if time_slept > self.constants.SUB_SLOT_TIME_TARGET: now = time.time() - removed_keys: List[bytes32] = [] + removed_keys: list[bytes32] = [] for key, add_time in self.cache_add_time.items(): if now - float(add_time) > self.constants.SUB_SLOT_TIME_TARGET * 3: self.sps.pop(key, None) diff --git a/chia/farmer/farmer_api.py b/chia/farmer/farmer_api.py index 4de86f077f1c..b6796148cf43 100644 --- a/chia/farmer/farmer_api.py +++ b/chia/farmer/farmer_api.py @@ -3,7 +3,7 @@ import json import logging import time -from typing import Any, Dict, List, Optional, Union +from typing import Any, Optional, Union import aiohttp from chia_rs import AugSchemeMPL, G2Element, PrivateKey @@ -117,7 +117,7 @@ async def new_proof_of_space( if new_proof_of_space.farmer_reward_address_override is not None: self.farmer.notify_farmer_reward_taken_by_harvester_as_fee(sp, new_proof_of_space) - sp_src_data: Optional[List[Optional[SignatureRequestSourceData]]] = None + sp_src_data: Optional[list[Optional[SignatureRequestSourceData]]] = None if ( new_proof_of_space.include_source_signature_data or new_proof_of_space.farmer_reward_address_override is not None @@ -182,7 +182,7 @@ async def new_proof_of_space( if p2_singleton_puzzle_hash not in self.farmer.pool_state: self.farmer.log.info(f"Did not find pool info for {p2_singleton_puzzle_hash}") return - pool_state_dict: Dict[str, Any] = self.farmer.pool_state[p2_singleton_puzzle_hash] + pool_state_dict: dict[str, Any] = self.farmer.pool_state[p2_singleton_puzzle_hash] pool_url = pool_state_dict["pool_config"].pool_url if pool_url == "": # `pool_url == ""` means solo plotNFT farming @@ -268,7 +268,7 @@ async def new_proof_of_space( # The plot key is 2/2 so we need the harvester's half of the signature m_to_sign = payload.get_hash() - m_src_data: Optional[List[Optional[SignatureRequestSourceData]]] = None + m_src_data: 
Optional[list[Optional[SignatureRequestSourceData]]] = None if ( # pragma: no cover new_proof_of_space.include_source_signature_data @@ -374,7 +374,7 @@ async def new_proof_of_space( ) return - pool_response: Dict[str, Any] = json.loads(await resp.text()) + pool_response: dict[str, Any] = json.loads(await resp.text()) self.farmer.log.info(f"Pool response: {pool_response}") if "error_code" in pool_response: self.farmer.log.error( @@ -501,7 +501,7 @@ async def new_signage_point(self, new_signage_point: farmer_protocol.NewSignageP self.farmer.sps[new_signage_point.challenge_chain_sp].append(new_signage_point) try: - pool_difficulties: List[PoolDifficulty] = [] + pool_difficulties: list[PoolDifficulty] = [] for p2_singleton_puzzle_hash, pool_dict in self.farmer.pool_state.items(): if pool_dict["pool_config"].pool_url == "": # Self pooling @@ -568,7 +568,7 @@ async def request_signed_values(self, full_node_request: farmer_protocol.Request full_node_request.quality_string ] - message_data: Optional[List[Optional[SignatureRequestSourceData]]] = None + message_data: Optional[list[Optional[SignatureRequestSourceData]]] = None if full_node_request.foliage_block_data is not None: message_data = [ diff --git a/chia/full_node/block_height_map.py b/chia/full_node/block_height_map.py index 316eafe2adba..1ef90473afcc 100644 --- a/chia/full_node/block_height_map.py +++ b/chia/full_node/block_height_map.py @@ -3,7 +3,7 @@ import logging from dataclasses import dataclass from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Optional import aiofiles @@ -20,7 +20,7 @@ @streamable @dataclass(frozen=True) class SesCache(Streamable): - content: List[Tuple[uint32, bytes]] + content: list[tuple[uint32, bytes]] class BlockHeightMap: @@ -38,7 +38,7 @@ class BlockHeightMap: # All sub-epoch summaries that have been included in the blockchain from the beginning until and including the peak # (height_included, SubEpochSummary). 
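
check_missing_signage_points above infers skipped signage points from the gap between consecutive timestamps. A back-of-envelope version of the timing half (the constant values are mainnet assumptions for the sketch; the real code reads them from consensus constants):

SUB_SLOT_TIME_TARGET = 600  # seconds per sub-slot, assumed
NUM_SPS_SUB_SLOT = 64  # signage points per sub-slot, assumed

SP_INTERVAL = SUB_SLOT_TIME_TARGET / NUM_SPS_SUB_SLOT  # 9.375s per signage point


def estimate_skipped_sps(prev_timestamp: int, new_timestamp: int) -> int:
    gap = max(0, new_timestamp - prev_timestamp)
    # One interval of silence is normal; anything beyond it suggests misses.
    return max(0, round(gap / SP_INTERVAL) - 1)
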
Note: ONLY for the blocks in the path to the peak # The value is a serialized SubEpochSummary object - __sub_epoch_summaries: Dict[uint32, bytes] + __sub_epoch_summaries: dict[uint32, bytes] # count how many blocks have been added since the cache was last written to # disk @@ -194,7 +194,7 @@ async def _load_blocks_from(self, height: uint32, prev_hash: bytes32) -> None: async with self.db.reader_no_transaction() as conn: async with conn.execute(query, (window_end, height)) as cursor: # maps block-hash -> (height, prev-hash, sub-epoch-summary) - ordered: Dict[bytes32, Tuple[uint32, bytes32, Optional[bytes]]] = {} + ordered: dict[bytes32, tuple[uint32, bytes32, Optional[bytes]]] = {} for r in await cursor.fetchall(): ordered[r[0]] = (r[2], r[1], r[3]) @@ -267,5 +267,5 @@ def rollback(self, fork_height: int) -> None: def get_ses(self, height: uint32) -> SubEpochSummary: return SubEpochSummary.from_bytes(self.__sub_epoch_summaries[height]) - def get_ses_heights(self) -> List[uint32]: + def get_ses_heights(self) -> list[uint32]: return sorted(self.__sub_epoch_summaries.keys()) diff --git a/chia/full_node/block_store.py b/chia/full_node/block_store.py index a50e828b18cf..348c1ec24a19 100644 --- a/chia/full_node/block_store.py +++ b/chia/full_node/block_store.py @@ -3,7 +3,7 @@ import dataclasses import logging import sqlite3 -from typing import Dict, List, Optional, Set, Tuple +from typing import Optional import typing_extensions import zstd @@ -40,7 +40,7 @@ def decompress_blob(block_bytes: bytes) -> bytes: class BlockStore: block_cache: LRUCache[bytes32, FullBlock] db_wrapper: DBWrapper2 - ses_challenge_cache: LRUCache[bytes32, List[SubEpochChallengeSegment]] + ses_challenge_cache: LRUCache[bytes32, list[SubEpochChallengeSegment]] @classmethod async def create(cls, db_wrapper: DBWrapper2, *, use_cache: bool = True) -> BlockStore: @@ -105,7 +105,7 @@ async def rollback(self, height: int) -> None: async with self.db_wrapper.writer_maybe_transaction() as conn: await conn.execute("UPDATE full_blocks SET in_main_chain=0 WHERE height>? 
AND in_main_chain=1", (height,)) - async def set_in_chain(self, header_hashes: List[Tuple[bytes32]]) -> None: + async def set_in_chain(self, header_hashes: list[tuple[bytes32]]) -> None: async with self.db_wrapper.writer_maybe_transaction() as conn: async with await conn.executemany( "UPDATE full_blocks SET in_main_chain=1 WHERE header_hash=?", header_hashes @@ -162,7 +162,7 @@ async def add_full_block(self, header_hash: bytes32, block: FullBlock, block_rec ) async def persist_sub_epoch_challenge_segments( - self, ses_block_hash: bytes32, segments: List[SubEpochChallengeSegment] + self, ses_block_hash: bytes32, segments: list[SubEpochChallengeSegment] ) -> None: async with self.db_wrapper.writer_maybe_transaction() as conn: await conn.execute( @@ -173,8 +173,8 @@ async def persist_sub_epoch_challenge_segments( async def get_sub_epoch_challenge_segments( self, ses_block_hash: bytes32, - ) -> Optional[List[SubEpochChallengeSegment]]: - cached: Optional[List[SubEpochChallengeSegment]] = self.ses_challenge_cache.get(ses_block_hash) + ) -> Optional[list[SubEpochChallengeSegment]]: + cached: Optional[list[SubEpochChallengeSegment]] = self.ses_challenge_cache.get(ses_block_hash) if cached is not None: return cached @@ -186,7 +186,7 @@ async def get_sub_epoch_challenge_segments( row = await cursor.fetchone() if row is not None: - challenge_segments: List[SubEpochChallengeSegment] = SubEpochSegments.from_bytes(row[0]).challenge_segments + challenge_segments: list[SubEpochChallengeSegment] = SubEpochSegments.from_bytes(row[0]).challenge_segments self.ses_challenge_cache.put(ses_block_hash, challenge_segments) return challenge_segments return None @@ -225,14 +225,14 @@ async def get_full_block_bytes(self, header_hash: bytes32) -> Optional[bytes]: return None - async def get_full_blocks_at(self, heights: List[uint32]) -> List[FullBlock]: + async def get_full_blocks_at(self, heights: list[uint32]) -> list[FullBlock]: if len(heights) == 0: return [] formatted_str = f'SELECT block from full_blocks WHERE height in ({"?," * (len(heights) - 1)}?)' async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute(formatted_str, heights) as cursor: - ret: List[FullBlock] = [] + ret: list[FullBlock] = [] for row in await cursor.fetchall(): ret.append(decompress(row[0])) return ret @@ -285,11 +285,11 @@ async def get_generator(self, header_hash: bytes32) -> Optional[bytes]: b = FullBlock.from_bytes(block_bytes) return None if b.transactions_generator is None else bytes(b.transactions_generator) - async def get_generators_at(self, heights: Set[uint32]) -> Dict[uint32, bytes]: + async def get_generators_at(self, heights: set[uint32]) -> dict[uint32, bytes]: if len(heights) == 0: return {} - generators: Dict[uint32, bytes] = {} + generators: dict[uint32, bytes] = {} formatted_str = ( f"SELECT block, height from full_blocks " f'WHERE in_main_chain=1 AND height in ({"?," * (len(heights) - 1)}?)' @@ -317,7 +317,7 @@ async def get_generators_at(self, heights: Set[uint32]) -> Dict[uint32, bytes]: return generators - async def get_block_records_by_hash(self, header_hashes: List[bytes32]) -> List[BlockRecord]: + async def get_block_records_by_hash(self, header_hashes: list[bytes32]) -> list[BlockRecord]: """ Returns a list of Block Records, ordered by the same order in which header_hashes are passed in. 
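
Both rollbacks in the hunks above, BlockHeightMap.rollback and BlockStore.rollback, implement the same reorg rule: everything strictly above the fork height stops being main-chain state. For the in-memory map that is just a keyed delete:

def rollback(cache: dict[int, bytes], fork_height: int) -> None:
    # Materialize the key list first; deleting while iterating a dict raises.
    for height in [h for h in cache if h > fork_height]:
        del cache[height]

The SQL side expresses the identical predicate with height>? AND in_main_chain=1.
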
Throws an exception if the blocks are not present @@ -325,7 +325,7 @@ async def get_block_records_by_hash(self, header_hashes: List[bytes32]) -> List[ if len(header_hashes) == 0: return [] - all_blocks: Dict[bytes32, BlockRecord] = {} + all_blocks: dict[bytes32, BlockRecord] = {} async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute( "SELECT header_hash,block_record " @@ -337,7 +337,7 @@ async def get_block_records_by_hash(self, header_hashes: List[bytes32]) -> List[ block_rec = BlockRecord.from_bytes(row[1]) all_blocks[block_rec.header_hash] = block_rec - ret: List[BlockRecord] = [] + ret: list[BlockRecord] = [] for hh in header_hashes: if hh not in all_blocks: raise ValueError(f"Header hash {hh} not in the blockchain") @@ -363,7 +363,7 @@ async def get_prev_hash(self, header_hash: bytes32) -> bytes32: raise KeyError("missing block in chain") return bytes32(row[0]) - async def get_block_bytes_by_hash(self, header_hashes: List[bytes32]) -> List[bytes]: + async def get_block_bytes_by_hash(self, header_hashes: list[bytes32]) -> list[bytes]: """ Returns a list of Full Blocks block blobs, ordered by the same order in which header_hashes are passed in. Throws an exception if the blocks are not present @@ -376,14 +376,14 @@ async def get_block_bytes_by_hash(self, header_hashes: List[bytes32]) -> List[by formatted_str = ( f'SELECT header_hash, block from full_blocks WHERE header_hash in ({"?," * (len(header_hashes) - 1)}?)' ) - all_blocks: Dict[bytes32, bytes] = {} + all_blocks: dict[bytes32, bytes] = {} async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute(formatted_str, header_hashes) as cursor: for row in await cursor.fetchall(): header_hash = bytes32(row[0]) all_blocks[header_hash] = decompress_blob(row[1]) - ret: List[bytes] = [] + ret: list[bytes] = [] for hh in header_hashes: block = all_blocks.get(hh) if block is not None: @@ -392,7 +392,7 @@ async def get_block_bytes_by_hash(self, header_hashes: List[bytes32]) -> List[by raise ValueError(f"Header hash {hh} not in the blockchain") return ret - async def get_blocks_by_hash(self, header_hashes: List[bytes32]) -> List[FullBlock]: + async def get_blocks_by_hash(self, header_hashes: list[bytes32]) -> list[FullBlock]: """ Returns a list of Full Blocks blocks, ordered by the same order in which header_hashes are passed in. Throws an exception if the blocks are not present @@ -404,7 +404,7 @@ async def get_blocks_by_hash(self, header_hashes: List[bytes32]) -> List[FullBlo formatted_str = ( f'SELECT header_hash, block from full_blocks WHERE header_hash in ({"?," * (len(header_hashes) - 1)}?)' ) - all_blocks: Dict[bytes32, FullBlock] = {} + all_blocks: dict[bytes32, FullBlock] = {} async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute(formatted_str, header_hashes) as cursor: for row in await cursor.fetchall(): @@ -412,7 +412,7 @@ async def get_blocks_by_hash(self, header_hashes: List[bytes32]) -> List[FullBlo full_block: FullBlock = decompress(row[1]) all_blocks[header_hash] = full_block self.block_cache.put(header_hash, full_block) - ret: List[FullBlock] = [] + ret: list[FullBlock] = [] for hh in header_hashes: if hh not in all_blocks: raise ValueError(f"Header hash {hh} not in the blockchain") @@ -436,13 +436,13 @@ async def get_block_records_in_range( self, start: int, stop: int, - ) -> Dict[bytes32, BlockRecord]: + ) -> dict[bytes32, BlockRecord]: """ Returns a dictionary with all blocks in range between start and stop if present. 
""" - ret: Dict[bytes32, BlockRecord] = {} + ret: dict[bytes32, BlockRecord] = {} async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute( "SELECT header_hash,block_record " "FROM full_blocks " "WHERE height >= ? AND height <= ?", @@ -459,7 +459,7 @@ async def get_block_bytes_in_range( self, start: int, stop: int, - ) -> List[bytes]: + ) -> list[bytes]: """ Returns a list with all full blocks in range between start and stop if present. @@ -471,12 +471,12 @@ async def get_block_bytes_in_range( "SELECT block FROM full_blocks WHERE height >= ? AND height <= ? and in_main_chain=1", (start, stop), ) as cursor: - rows: List[sqlite3.Row] = list(await cursor.fetchall()) + rows: list[sqlite3.Row] = list(await cursor.fetchall()) if len(rows) != (stop - start) + 1: raise ValueError(f"Some blocks in range {start}-{stop} were not found.") return [decompress_blob(row[0]) for row in rows] - async def get_peak(self) -> Optional[Tuple[bytes32, uint32]]: + async def get_peak(self) -> Optional[tuple[bytes32, uint32]]: async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute("SELECT hash FROM current_peak WHERE key = 0") as cursor: peak_row = await cursor.fetchone() @@ -491,7 +491,7 @@ async def get_peak(self) -> Optional[Tuple[bytes32, uint32]]: async def get_block_records_close_to_peak( self, blocks_n: int - ) -> Tuple[Dict[bytes32, BlockRecord], Optional[bytes32]]: + ) -> tuple[dict[bytes32, BlockRecord], Optional[bytes32]]: """ Returns a dictionary with all blocks that have height >= peak height - blocks_n, as well as the peak header hash. @@ -501,7 +501,7 @@ async def get_block_records_close_to_peak( if peak is None: return {}, None - ret: Dict[bytes32, BlockRecord] = {} + ret: dict[bytes32, BlockRecord] = {} async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute( "SELECT header_hash, block_record " "FROM full_blocks " "WHERE height >= ?", @@ -531,7 +531,7 @@ async def is_fully_compactified(self, header_hash: bytes32) -> Optional[bool]: return None return bool(row[0]) - async def get_random_not_compactified(self, number: int) -> List[int]: + async def get_random_not_compactified(self, number: int) -> list[int]: async with self.db_wrapper.reader_no_transaction() as conn: async with conn.execute( f"SELECT height FROM full_blocks WHERE in_main_chain=1 AND is_fully_compactified=0 " diff --git a/chia/full_node/coin_store.py b/chia/full_node/coin_store.py index fb4d07c9ea57..bec0cfecda26 100644 --- a/chia/full_node/coin_store.py +++ b/chia/full_node/coin_store.py @@ -4,7 +4,8 @@ import logging import sqlite3 import time -from typing import Any, Collection, Dict, List, Optional, Set, Tuple +from collections.abc import Collection +from typing import Any, Optional import typing_extensions from aiosqlite import Cursor @@ -31,7 +32,7 @@ class CoinStore: """ db_wrapper: DBWrapper2 - coins_added_at_height_cache: LRUCache[uint32, List[CoinRecord]] + coins_added_at_height_cache: LRUCache[uint32, list[CoinRecord]] @classmethod async def create(cls, db_wrapper: DBWrapper2) -> CoinStore: @@ -85,8 +86,8 @@ async def new_block( timestamp: uint64, included_reward_coins: Collection[Coin], tx_additions: Collection[Coin], - tx_removals: List[bytes32], - ) -> List[CoinRecord]: + tx_removals: list[bytes32], + ) -> list[CoinRecord]: """ Only called for blocks which are blocks (and thus have rewards and transactions) Returns a list of the CoinRecords that were added by this block @@ -148,16 +149,16 @@ async def get_coin_record(self, coin_name: 
bytes32) -> Optional[CoinRecord]: return CoinRecord(coin, row[0], row[1], row[2], row[6]) return None - async def get_coin_records(self, names: Collection[bytes32]) -> List[CoinRecord]: + async def get_coin_records(self, names: Collection[bytes32]) -> list[CoinRecord]: if len(names) == 0: return [] - coins: List[CoinRecord] = [] + coins: list[CoinRecord] = [] async with self.db_wrapper.reader_no_transaction() as conn: - cursors: List[Cursor] = [] + cursors: list[Cursor] = [] for batch in to_batches(names, SQLITE_MAX_VARIABLE_NUMBER): - names_db: Tuple[Any, ...] = tuple(batch.entries) + names_db: tuple[Any, ...] = tuple(batch.entries) cursors.append( await conn.execute( f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, " @@ -175,8 +176,8 @@ async def get_coin_records(self, names: Collection[bytes32]) -> List[CoinRecord] return coins - async def get_coins_added_at_height(self, height: uint32) -> List[CoinRecord]: - coins_added: Optional[List[CoinRecord]] = self.coins_added_at_height_cache.get(height) + async def get_coins_added_at_height(self, height: uint32) -> list[CoinRecord]: + coins_added: Optional[list[CoinRecord]] = self.coins_added_at_height_cache.get(height) if coins_added is not None: return coins_added @@ -194,7 +195,7 @@ async def get_coins_added_at_height(self, height: uint32) -> List[CoinRecord]: self.coins_added_at_height_cache.put(height, coins) return coins - async def get_coins_removed_at_height(self, height: uint32) -> List[CoinRecord]: + async def get_coins_removed_at_height(self, height: uint32) -> list[CoinRecord]: # Special case to avoid querying all unspent coins (spent_index=0) if height == 0: return [] @@ -212,7 +213,7 @@ async def get_coins_removed_at_height(self, height: uint32) -> List[CoinRecord]: coins.append(coin_record) return coins - async def get_all_coins(self, include_spent_coins: bool) -> List[CoinRecord]: + async def get_all_coins(self, include_spent_coins: bool) -> list[CoinRecord]: # WARNING: this should only be used for testing or in a simulation, # running it on a synced testnet or mainnet node will most likely result in an OOM error. coins = set() @@ -236,7 +237,7 @@ async def get_coin_records_by_puzzle_hash( puzzle_hash: bytes32, start_height: uint32 = uint32(0), end_height: uint32 = uint32((2**32) - 1), - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: coins = set() async with self.db_wrapper.reader_no_transaction() as conn: @@ -255,15 +256,15 @@ async def get_coin_records_by_puzzle_hash( async def get_coin_records_by_puzzle_hashes( self, include_spent_coins: bool, - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], start_height: uint32 = uint32(0), end_height: uint32 = uint32((2**32) - 1), - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: if len(puzzle_hashes) == 0: return [] coins = set() - puzzle_hashes_db: Tuple[Any, ...] + puzzle_hashes_db: tuple[Any, ...] 
+        puzzle_hashes_db: tuple[Any, ...]
puzzle_hashes_db = tuple(puzzle_hashes) async with self.db_wrapper.reader_no_transaction() as conn: @@ -283,10 +284,10 @@ async def get_coin_records_by_puzzle_hashes( async def get_coin_records_by_names( self, include_spent_coins: bool, - names: List[bytes32], + names: list[bytes32], start_height: uint32 = uint32(0), end_height: uint32 = uint32((2**32) - 1), - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: if len(names) == 0: return [] @@ -320,18 +321,18 @@ def row_to_coin_state(self, row: sqlite3.Row) -> CoinState: async def get_coin_states_by_puzzle_hashes( self, include_spent_coins: bool, - puzzle_hashes: Set[bytes32], + puzzle_hashes: set[bytes32], min_height: uint32 = uint32(0), *, max_items: int = 50000, - ) -> Set[CoinState]: + ) -> set[CoinState]: if len(puzzle_hashes) == 0: return set() - coins: Set[CoinState] = set() + coins: set[CoinState] = set() async with self.db_wrapper.reader_no_transaction() as conn: for batch in to_batches(puzzle_hashes, SQLITE_MAX_VARIABLE_NUMBER): - puzzle_hashes_db: Tuple[Any, ...] = tuple(batch.entries) + puzzle_hashes_db: tuple[Any, ...] = tuple(batch.entries) async with conn.execute( f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, " f"coin_parent, amount, timestamp FROM coin_record INDEXED BY coin_puzzle_hash " @@ -353,17 +354,17 @@ async def get_coin_states_by_puzzle_hashes( async def get_coin_records_by_parent_ids( self, include_spent_coins: bool, - parent_ids: List[bytes32], + parent_ids: list[bytes32], start_height: uint32 = uint32(0), end_height: uint32 = uint32((2**32) - 1), - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: if len(parent_ids) == 0: return [] coins = set() async with self.db_wrapper.reader_no_transaction() as conn: for batch in to_batches(parent_ids, SQLITE_MAX_VARIABLE_NUMBER): - parent_ids_db: Tuple[Any, ...] = tuple(batch.entries) + parent_ids_db: tuple[Any, ...] = tuple(batch.entries) async with conn.execute( f"SELECT confirmed_index, spent_index, coinbase, puzzle_hash, coin_parent, amount, timestamp " f'FROM coin_record WHERE coin_parent in ({"?," * (len(batch.entries) - 1)}?) ' @@ -385,14 +386,14 @@ async def get_coin_states_by_ids( *, max_height: uint32 = uint32.MAXIMUM, max_items: int = 50000, - ) -> List[CoinState]: + ) -> list[CoinState]: if len(coin_ids) == 0: return [] - coins: List[CoinState] = [] + coins: list[CoinState] = [] async with self.db_wrapper.reader_no_transaction() as conn: for batch in to_batches(coin_ids, SQLITE_MAX_VARIABLE_NUMBER): - coin_ids_db: Tuple[Any, ...] = tuple(batch.entries) + coin_ids_db: tuple[Any, ...] = tuple(batch.entries) max_height_sql = "" if max_height != uint32.MAXIMUM: @@ -417,7 +418,7 @@ async def get_coin_states_by_ids( async def batch_coin_states_by_puzzle_hashes( self, - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], *, min_height: uint32 = uint32(0), include_spent: bool = True, @@ -425,7 +426,7 @@ async def batch_coin_states_by_puzzle_hashes( include_hinted: bool = True, min_amount: uint64 = uint64(0), max_items: int = 50000, - ) -> Tuple[List[CoinState], Optional[uint32]]: + ) -> tuple[list[CoinState], Optional[uint32]]: """ Returns the coin states, as well as the next block height (or `None` if finished). You cannot exceed `CoinStore.MAX_PUZZLE_HASH_BATCH_SIZE` puzzle hashes in the query. @@ -439,8 +440,8 @@ async def batch_coin_states_by_puzzle_hashes( return [], None # Coin states are keyed by coin id to filter out and prevent duplicates. 
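(Illustrative sketch, not part of the patch: the dedup pattern the comment above describes, assuming the CoinState type from chia_rs that the surrounding code uses; dedup_coin_states and fetched_states are hypothetical names.)

    from chia_rs import CoinState

    from chia.types.blockchain_format.sized_bytes import bytes32

    def dedup_coin_states(fetched_states: list[CoinState]) -> list[CoinState]:
        # Key each state by its coin id so a coin matched by several
        # puzzle-hash batches is reported only once.
        deduped: dict[bytes32, CoinState] = {}
        for state in fetched_states:
            deduped[state.coin.name()] = state  # later rows overwrite duplicates
        return list(deduped.values())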
- coin_states_dict: Dict[bytes32, CoinState] = dict() - coin_states: List[CoinState] + coin_states_dict: dict[bytes32, CoinState] = dict() + coin_states: list[CoinState] async with self.db_wrapper.reader() as conn: puzzle_hashes_db = tuple(puzzle_hashes) @@ -530,13 +531,13 @@ async def batch_coin_states_by_puzzle_hashes( return coin_states, next_height - async def rollback_to_block(self, block_index: int) -> List[CoinRecord]: + async def rollback_to_block(self, block_index: int) -> list[CoinRecord]: """ Note that block_index can be negative, in which case everything is rolled back Returns the list of coin records that have been modified """ - coin_changes: Dict[bytes32, CoinRecord] = {} + coin_changes: dict[bytes32, CoinRecord] = {} # Add coins that are confirmed in the reverted blocks to the list of updated coins. async with self.db_wrapper.writer_maybe_transaction() as conn: async with conn.execute( @@ -569,7 +570,7 @@ async def rollback_to_block(self, block_index: int) -> List[CoinRecord]: return list(coin_changes.values()) # Store CoinRecord in DB - async def _add_coin_records(self, records: List[CoinRecord]) -> None: + async def _add_coin_records(self, records: list[CoinRecord]) -> None: values2 = [] for record in records: values2.append( @@ -592,7 +593,7 @@ async def _add_coin_records(self, records: List[CoinRecord]) -> None: ) # Update coin_record to be spent in DB - async def _set_spent(self, coin_names: List[bytes32], index: uint32) -> None: + async def _set_spent(self, coin_names: list[bytes32], index: uint32) -> None: assert len(coin_names) == 0 or index > 0 if len(coin_names) == 0: diff --git a/chia/full_node/fee_estimate.py b/chia/full_node/fee_estimate.py index e8c1f635c98d..1f8e41d3887f 100644 --- a/chia/full_node/fee_estimate.py +++ b/chia/full_node/fee_estimate.py @@ -2,7 +2,7 @@ import math from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from chia.types.fee_rate import FeeRate, FeeRateV2 from chia.util.ints import uint64 @@ -51,4 +51,4 @@ class FeeEstimateGroup(Streamable): """ error: Optional[str] - estimates: List[FeeEstimate] + estimates: list[FeeEstimate] diff --git a/chia/full_node/fee_estimation.py b/chia/full_node/fee_estimation.py index 09e018c12d04..c31771def3dc 100644 --- a/chia/full_node/fee_estimation.py +++ b/chia/full_node/fee_estimation.py @@ -2,7 +2,6 @@ from dataclasses import dataclass from datetime import datetime -from typing import List from chia.types.clvm_cost import CLVMCost from chia.types.fee_rate import FeeRate @@ -90,4 +89,4 @@ class FeeBlockInfo: # See BlockRecord """ block_height: uint32 - included_items: List[MempoolItemInfo] + included_items: list[MempoolItemInfo] diff --git a/chia/full_node/fee_history.py b/chia/full_node/fee_history.py index 59c54453078c..b29e41b887d0 100644 --- a/chia/full_node/fee_history.py +++ b/chia/full_node/fee_history.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.util.ints import uint8, uint32 from chia.util.streamable import Streamable, streamable @@ -11,10 +10,10 @@ @dataclass(frozen=True) class FeeStatBackup(Streamable): type: str - tx_ct_avg: List[str] - confirmed_average: List[List[str]] - failed_average: List[List[str]] - m_fee_rate_avg: List[str] + tx_ct_avg: list[str] + confirmed_average: list[list[str]] + failed_average: list[list[str]] + m_fee_rate_avg: list[str] @streamable @@ -23,4 +22,4 @@ class FeeTrackerBackup(Streamable): fee_estimator_version: uint8 first_recorded_height: 
uint32 latest_seen_height: uint32 - stats: List[FeeStatBackup] + stats: list[FeeStatBackup] diff --git a/chia/full_node/fee_tracker.py b/chia/full_node/fee_tracker.py index 5e43518def33..ba517189bda7 100644 --- a/chia/full_node/fee_tracker.py +++ b/chia/full_node/fee_tracker.py @@ -3,7 +3,7 @@ import logging from bisect import bisect_left from dataclasses import dataclass -from typing import List, Optional, Tuple +from typing import Optional from chia.full_node.fee_estimate_store import FeeStore from chia.full_node.fee_estimation import MempoolItemInfo @@ -48,7 +48,7 @@ class EstimateResult: median: float -def get_estimate_block_intervals() -> List[int]: +def get_estimate_block_intervals() -> list[int]: return [ SHORT_BLOCK_PERIOD * SHORT_SCALE - SHORT_SCALE, MED_BLOCK_PERIOD * MED_SCALE - MED_SCALE, @@ -56,31 +56,31 @@ def get_estimate_block_intervals() -> List[int]: ] -def get_estimate_time_intervals() -> List[uint64]: +def get_estimate_time_intervals() -> list[uint64]: return [uint64(blocks * SECONDS_PER_BLOCK) for blocks in get_estimate_block_intervals()] # Implementation of bitcoin core fee estimation algorithm # https://gist.github.com/morcos/d3637f015bc4e607e1fd10d8351e9f41 class FeeStat: # TxConfirmStats - buckets: List[float] # These elements represent the upper-bound of the range for the bucket + buckets: list[float] # These elements represent the upper-bound of the range for the bucket # For each bucket xL # Count the total number of txs in each bucket # Track historical moving average of this total over block - tx_ct_avg: List[float] + tx_ct_avg: list[float] # Count the total number of txs confirmed within Y periods in each bucket # Track the historical moving average of these totals over blocks - confirmed_average: List[List[float]] # confirmed_average [y][x] + confirmed_average: list[list[float]] # confirmed_average [y][x] # Track moving average of txs which have been evicted from the mempool # after failing to be confirmed within Y block - failed_average: List[List[float]] # failed_average [y][x] + failed_average: list[list[float]] # failed_average [y][x] # Sum the total fee_rate of all txs in each bucket # Track historical moving average of this total over blocks - m_fee_rate_avg: List[float] + m_fee_rate_avg: list[float] decay: float @@ -90,15 +90,15 @@ class FeeStat: # TxConfirmStats # Mempool counts of outstanding transactions # For each bucket x, track the number of transactions in mempool # that are unconfirmed for each possible confirmation value y - unconfirmed_txs: List[List[int]] + unconfirmed_txs: list[list[int]] # transactions still unconfirmed after get_max_confirmed for each bucket - old_unconfirmed_txs: List[int] + old_unconfirmed_txs: list[int] max_confirms: int fee_store: FeeStore def __init__( self, - buckets: List[float], + buckets: list[float], max_periods: int, decay: float, scale: int, @@ -191,10 +191,10 @@ def remove_tx(self, latest_seen_height: uint32, item: MempoolItemInfo, bucket_in self.failed_average[i][bucket_index] += 1 def create_backup(self) -> FeeStatBackup: - str_tx_ct_abg: List[str] = [] - str_confirmed_average: List[List[str]] = [] - str_failed_average: List[List[str]] = [] - str_m_fee_rate_avg: List[str] = [] + str_tx_ct_abg: list[str] = [] + str_confirmed_average: list[list[str]] = [] + str_failed_average: list[list[str]] = [] + str_m_fee_rate_avg: list[str] = [] for i in range(0, self.max_periods): str_i_list_conf = [] for j in range(0, len(self.confirmed_average[i])): @@ -391,7 +391,7 @@ def clamp(n: int, smallest: int, largest: int) 
-> int: return max(smallest, min(n, largest)) -def get_bucket_index(buckets: List[float], fee_rate: float) -> int: +def get_bucket_index(buckets: list[float], fee_rate: float) -> int: if len(buckets) < 1: raise RuntimeError("get_bucket_index: buckets is invalid ({buckets})") # Choose the bucket to the left if we do not have exactly this fee rate @@ -400,10 +400,10 @@ def get_bucket_index(buckets: List[float], fee_rate: float) -> int: return clamp(bucket_index, 0, len(buckets) - 1) -def init_buckets() -> List[float]: +def init_buckets() -> list[float]: fee_rate = INITIAL_STEP - buckets: List[float] = [] + buckets: list[float] = [] while fee_rate < MAX_FEE_RATE: buckets.append(fee_rate) fee_rate = fee_rate * STEP_SIZE @@ -420,7 +420,7 @@ class FeeTracker: latest_seen_height: uint32 first_recorded_height: uint32 fee_store: FeeStore - buckets: List[float] + buckets: list[float] def __init__(self, fee_store: FeeStore): self.log = logging.Logger(__name__) @@ -476,7 +476,7 @@ def shutdown(self) -> None: ) self.fee_store.store_fee_data(backup) - def process_block(self, block_height: uint32, items: List[MempoolItemInfo]) -> None: + def process_block(self, block_height: uint32, items: list[MempoolItemInfo]) -> None: """A new block has been farmed and these transactions have been included in that block""" if block_height <= self.latest_seen_height: # Ignore reorgs @@ -540,7 +540,7 @@ def estimate_fee(self, target_time: int) -> EstimateResult: confirm_target_block = int(target_time / SECONDS_PER_BLOCK) + 1 return self.estimate_fee_for_block(uint32(confirm_target_block)) - def estimate_fees(self) -> Tuple[EstimateResult, EstimateResult, EstimateResult]: + def estimate_fees(self) -> tuple[EstimateResult, EstimateResult, EstimateResult]: """returns the fee estimate for short, medium, and long time horizons""" short = self.short_horizon.estimate_median_val( conf_target=SHORT_BLOCK_PERIOD * SHORT_SCALE - SHORT_SCALE, diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 7d64705f594d..6469b0c71adb 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -10,25 +10,10 @@ import sqlite3 import time import traceback +from collections.abc import AsyncIterator, Awaitable from multiprocessing.context import BaseContext from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Awaitable, - Callable, - ClassVar, - Dict, - List, - Optional, - Set, - TextIO, - Tuple, - Union, - cast, - final, -) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final from chia_rs import AugSchemeMPL, BLSCache from packaging.version import Version @@ -106,19 +91,19 @@ # This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2 @dataclasses.dataclass class PeakPostProcessingResult: - mempool_peak_result: List[NewPeakItem] # The new items from calling MempoolManager.new_peak - mempool_removals: List[MempoolRemoveInfo] # The removed mempool items from calling MempoolManager.new_peak + mempool_peak_result: list[NewPeakItem] # The new items from calling MempoolManager.new_peak + mempool_removals: list[MempoolRemoveInfo] # The removed mempool items from calling MempoolManager.new_peak fns_peak_result: FullNodeStorePeakResult # The result of calling FullNodeStore.new_peak - hints: List[Tuple[bytes32, bytes]] # The hints added to the DB - lookup_coin_ids: List[bytes32] # The coin IDs that we need to look up to notify wallets of changes + hints: list[tuple[bytes32, bytes]] # The hints 
added to the DB
+    hints: list[tuple[bytes32, bytes]]  # The hints
added to the DB + lookup_coin_ids: list[bytes32] # The coin IDs that we need to look up to notify wallets of changes @dataclasses.dataclass(frozen=True) class WalletUpdate: fork_height: uint32 peak: Peak - coin_records: List[CoinRecord] - hints: Dict[bytes32, bytes32] + coin_records: list[CoinRecord] + hints: dict[bytes32, bytes32] @final @@ -130,9 +115,9 @@ class FullNode: _protocol_check: ClassVar[RpcServiceProtocol] = cast("FullNode", None) root_path: Path - config: Dict[str, Any] + config: dict[str, Any] constants: ConsensusConstants - signage_point_times: List[float] + signage_point_times: list[float] full_node_store: FullNodeStore log: logging.Logger db_path: Path @@ -141,16 +126,16 @@ class FullNode: initialized: bool = False _server: Optional[ChiaServer] = None _shut_down: bool = False - pow_creation: Dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict) + pow_creation: dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict) state_changed_callback: Optional[StateChangedProtocol] = None full_node_peers: Optional[FullNodePeers] = None sync_store: SyncStore = dataclasses.field(default_factory=SyncStore) uncompact_task: Optional[asyncio.Task[None]] = None - compact_vdf_requests: Set[bytes32] = dataclasses.field(default_factory=set) + compact_vdf_requests: set[bytes32] = dataclasses.field(default_factory=set) # TODO: Logging isn't setup yet so the log entries related to parsing the # config would end up on stdout if handled here. multiprocessing_context: Optional[BaseContext] = None - _ui_tasks: Set[asyncio.Task[None]] = dataclasses.field(default_factory=set) + _ui_tasks: set[asyncio.Task[None]] = dataclasses.field(default_factory=set) subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions) _transaction_queue_task: Optional[asyncio.Task[None]] = None simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None @@ -169,7 +154,7 @@ class FullNode: _timelord_lock: Optional[asyncio.Lock] = None weight_proof_handler: Optional[WeightProofHandler] = None # hashes of peaks that failed long sync on chip13 Validation - bad_peak_cache: Dict[bytes32, uint32] = dataclasses.field(default_factory=dict) + bad_peak_cache: dict[bytes32, uint32] = dataclasses.field(default_factory=dict) wallet_sync_task: Optional[asyncio.Task[None]] = None _bls_cache: BLSCache = dataclasses.field(default_factory=lambda: BLSCache(50000)) @@ -185,7 +170,7 @@ def server(self) -> ChiaServer: @classmethod async def create( cls, - config: Dict[str, Any], + config: dict[str, Any], root_path: Path, consensus_constants: ConsensusConstants, name: str = __name__, @@ -435,9 +420,9 @@ def compact_vdf_sem(self) -> LimitedSemaphore: assert self._compact_vdf_sem is not None return self._compact_vdf_sem - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: connections = self.server.get_connections(request_node_type) - con_info: List[Dict[str, Any]] = [] + con_info: list[dict[str, Any]] = [] if self.sync_store is not None: peak_store = self.sync_store.peer_to_peak else: @@ -452,7 +437,7 @@ def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[st peak_height = None peak_hash = None peak_weight = None - con_dict: Dict[str, Any] = { + con_dict: dict[str, Any] = { "type": con.connection_type, "local_port": con.local_port, "peer_host": con.peer_info.host, @@ -510,7 +495,7 @@ async def 
initialize_weight_proof(self) -> None: def set_server(self, server: ChiaServer) -> None: self._server = server - dns_servers: List[str] = [] + dns_servers: list[str] = [] network_name = self.config["selected_network"] try: default_port = self.config["network_overrides"]["config"][network_name]["default_full_node_port"] @@ -540,7 +525,7 @@ def set_server(self, server: ChiaServer) -> None: self.log.error(f"Exception in peer discovery: {e}") self.log.error(f"Exception Stack: {error_stack}") - def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None: + def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None: if self.state_changed_callback is not None: self.state_changed_callback(change, change_data) @@ -735,7 +720,7 @@ async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnec # If peer connects while we are syncing, check if they have the block we are syncing towards target_peak = self.sync_store.target_peak if target_peak is not None and request.header_hash != target_peak.header_hash: - peak_peers: Set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash]) + peak_peers: set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash]) # Don't ask if we already know this peer has the peak if peer.peer_node_id not in peak_peers: target_peak_response: Optional[RespondBlock] = await peer.call_api( @@ -1003,7 +988,7 @@ async def _sync(self) -> None: async def request_validate_wp( self, peak_header_hash: bytes32, peak_height: uint32, peak_weight: uint128 - ) -> Tuple[uint32, List[SubEpochSummary]]: + ) -> tuple[uint32, list[SubEpochSummary]]: if self.weight_proof_handler is None: raise RuntimeError("Weight proof handler is None") peers_with_peak = self.get_peers_with_peak(peak_header_hash) @@ -1060,11 +1045,11 @@ async def sync_from_fork_point( fork_point_height: uint32, target_peak_sb_height: uint32, peak_hash: bytes32, - summaries: List[SubEpochSummary], + summaries: list[SubEpochSummary], ) -> None: buffer_size = 4 self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}") - peers_with_peak: List[WSChiaConnection] = self.get_peers_with_peak(peak_hash) + peers_with_peak: list[WSChiaConnection] = self.get_peers_with_peak(peak_hash) fork_point_height = await check_fork_next_block( self.blockchain, fork_point_height, peers_with_peak, node_next_block_check ) @@ -1101,10 +1086,10 @@ async def sync_from_fork_point( vs = ValidationState(ssi, diff, prev_ses_block) async def fetch_block_batches( - batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] + batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] ) -> None: start_height, end_height = 0, 0 - new_peers_with_peak: List[WSChiaConnection] = peers_with_peak[:] + new_peers_with_peak: list[WSChiaConnection] = peers_with_peak[:] try: # block request ranges are *inclusive*, this requires some # gymnastics of this range (+1 to make it exclusive, like normal @@ -1136,14 +1121,14 @@ async def fetch_block_batches( await batch_queue.put(None) async def validate_block_batches( - inner_batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] + inner_batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] ) -> None: nonlocal fork_info block_rate = 0 block_rate_time = time.monotonic() block_rate_height = -1 while True: - res: Optional[Tuple[WSChiaConnection, List[FullBlock]]] = await 
inner_batch_queue.get() + res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await inner_batch_queue.get() if res is None: self.log.debug("done fetching blocks") return None @@ -1212,7 +1197,7 @@ async def validate_block_batches( # height, in that case. self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE) - batch_queue_input: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] = asyncio.Queue( + batch_queue_input: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue( maxsize=buffer_size ) fetch_task = asyncio.Task(fetch_block_batches(batch_queue_input)) @@ -1224,8 +1209,8 @@ async def validate_block_batches( assert validate_task.done() fetch_task.cancel() # no need to cancel validate_task, if we end up here validate_task is already done - def get_peers_with_peak(self, peak_hash: bytes32) -> List[WSChiaConnection]: - peer_ids: Set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash]) + def get_peers_with_peak(self, peak_hash: bytes32) -> list[WSChiaConnection]: + peer_ids: set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash]) if len(peer_ids) == 0: self.log.warning(f"Not syncing, no peers with header_hash {peak_hash} ") return [] @@ -1244,7 +1229,7 @@ async def update_wallets(self, wallet_update: WalletUpdate) -> None: self.log.debug( f"update_wallets - fork_height: {wallet_update.fork_height}, peak_height: {wallet_update.peak.height}" ) - changes_for_peer: Dict[bytes32, Set[CoinState]] = {} + changes_for_peer: dict[bytes32, set[CoinState]] = {} for coin_record in wallet_update.coin_records: coin_id = coin_record.name subscribed_peers = self.subscriptions.peers_for_coin_id(coin_id) @@ -1280,16 +1265,16 @@ async def update_wallets(self, wallet_update: WalletUpdate) -> None: async def add_block_batch( self, - all_blocks: List[FullBlock], + all_blocks: list[FullBlock], peer_info: PeerInfo, fork_info: Optional[ForkInfo], vs: ValidationState, # in-out parameter - wp_summaries: Optional[List[SubEpochSummary]] = None, - ) -> Tuple[bool, Optional[StateChangeSummary], Optional[Err]]: + wp_summaries: Optional[list[SubEpochSummary]] = None, + ) -> tuple[bool, Optional[StateChangeSummary], Optional[Err]]: # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced - blocks_to_validate: List[FullBlock] = [] + blocks_to_validate: list[FullBlock] = [] for i, block in enumerate(all_blocks): header_hash = block.header_hash block_rec = await self.blockchain.get_block_record_from_db(header_hash) @@ -1338,7 +1323,7 @@ async def add_block_batch( # call below. pre_validate_blocks_multiprocessing() will update the # object we pass in. 
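(Aside for illustration: the in-out parameter convention noted in the comment above, reduced to a toy; ToyValidationState and validate_batch are invented stand-ins, not the real ValidationState API.)

    import dataclasses

    @dataclasses.dataclass
    class ToyValidationState:
        current_ssi: int
        current_difficulty: int

    def validate_batch(blocks: list[bytes], vs: ToyValidationState) -> None:
        # a real validator may advance these fields at sub-epoch boundaries;
        # since vs is mutated in place, the caller observes the update
        vs.current_ssi += len(blocks)

    vs = ToyValidationState(current_ssi=1, current_difficulty=1)
    for batch in ([b"b1", b"b2"], [b"b3"]):
        validate_batch(batch, vs)  # each call builds on state left by the last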
pre_validate_start = time.monotonic() - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( self.blockchain.constants, self.blockchain, blocks_to_validate, @@ -1435,7 +1420,7 @@ async def add_block_batch( async def get_sub_slot_iters_difficulty_ses_block( self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64] - ) -> Tuple[uint64, uint64, Optional[BlockRecord]]: + ) -> tuple[uint64, uint64, Optional[BlockRecord]]: prev_ses_block = None if ssi is None or diff is None: if block.height == 0: @@ -1669,7 +1654,7 @@ async def peak_post_processing( ) # Update the mempool (returns successful pending transactions added to the mempool) - spent_coins: List[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals] + spent_coins: list[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals] mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins) return PeakPostProcessingResult( @@ -1735,7 +1720,7 @@ async def peak_post_processing_2( else: await self.server.send_to_all([msg], NodeType.FULL_NODE) - coin_hints: Dict[bytes32, bytes32] = { + coin_hints: dict[bytes32, bytes32] = { coin_id: bytes32(hint) for coin_id, hint in ppp_result.hints if len(hint) == 32 } @@ -1839,9 +1824,10 @@ async def add_block( return await self.add_block(new_block, peer, bls_cache) state_change_summary: Optional[StateChangeSummary] = None ppp_result: Optional[PeakPostProcessingResult] = None - async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high), enable_profiler( - self.profile_block_validation - ) as pr: + async with ( + self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high), + enable_profiler(self.profile_block_validation) as pr, + ): # After acquiring the lock, check again, because another asyncio thread might have added it if self.blockchain.contains_block(header_hash): return None @@ -1972,7 +1958,7 @@ async def add_block( self.full_node_store.clear_candidate_blocks_below(clear_height) self.full_node_store.clear_unfinished_blocks_below(clear_height) - state_changed_data: Dict[str, Any] = { + state_changed_data: dict[str, Any] = { "transaction_block": False, "k_size": block.reward_chain_block.proof_of_space.size, "header_hash": block.header_hash, @@ -2293,7 +2279,7 @@ async def new_infusion_point_vdf( ) return None - finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots( + finished_sub_slots: Optional[list[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots( self.blockchain, prev_b, last_slot_cc_hash, @@ -2353,7 +2339,7 @@ async def new_infusion_point_vdf( async def add_end_of_sub_slot( self, end_of_slot_bundle: EndOfSubSlotBundle, peer: WSChiaConnection - ) -> Tuple[Optional[Message], bool]: + ) -> tuple[Optional[Message], bool]: fetched_ss = self.full_node_store.get_sub_slot(end_of_slot_bundle.challenge_chain.get_hash()) # We are not interested in sub-slots which have the same challenge chain but different reward chain. 
If there @@ -2450,7 +2436,7 @@ async def add_end_of_sub_slot( async def add_transaction( self, transaction: SpendBundle, spend_name: bytes32, peer: Optional[WSChiaConnection] = None, test: bool = False - ) -> Tuple[MempoolInclusionStatus, Optional[Err]]: + ) -> tuple[MempoolInclusionStatus, Optional[Err]]: if self.sync_store.get_sync_mode(): return MempoolInclusionStatus.FAILED, Err.NO_TRANSACTIONS_WHILE_SYNCING if not test and not (await self.synced()): @@ -2565,7 +2551,7 @@ async def broadcast_added_tx( f"Broadcasting added transaction {mempool_item.name} to {len(peer_ids)} peers took {total_time:.4f}s", ) - async def broadcast_removed_tx(self, mempool_removals: List[MempoolRemoveInfo]) -> None: + async def broadcast_removed_tx(self, mempool_removals: list[MempoolRemoveInfo]) -> None: total_removals = sum(len(r.items) for r in mempool_removals) if total_removals == 0: return @@ -2583,7 +2569,7 @@ async def broadcast_removed_tx(self, mempool_removals: List[MempoolRemoveInfo]) if len(all_peers) == 0: return - removals_to_send: Dict[bytes32, List[RemovedMempoolItem]] = dict() + removals_to_send: dict[bytes32, list[RemovedMempoolItem]] = dict() for removal_info in mempool_removals: for internal_mempool_item in removal_info.items: @@ -2924,7 +2910,7 @@ async def broadcast_uncompact_blocks( return None await asyncio.sleep(30) - broadcast_list: List[timelord_protocol.RequestCompactProofOfTime] = [] + broadcast_list: list[timelord_protocol.RequestCompactProofOfTime] = [] self.log.info("Getting random heights for bluebox to compact") @@ -2940,7 +2926,7 @@ async def broadcast_uncompact_blocks( for h in heights: headers = await self.blockchain.get_header_blocks_in_range(h, h, tx_filter=False) - records: Dict[bytes32, BlockRecord] = {} + records: dict[bytes32, BlockRecord] = {} if sanitize_weight_proof_only: records = await self.blockchain.get_block_records_in_range(h, h) for header in headers.values(): @@ -3008,7 +2994,7 @@ async def broadcast_uncompact_blocks( ) ) - broadcast_list_chunks: List[List[timelord_protocol.RequestCompactProofOfTime]] = [] + broadcast_list_chunks: list[list[timelord_protocol.RequestCompactProofOfTime]] = [] for index in range(0, len(broadcast_list), target_uncompact_proofs): broadcast_list_chunks.append(broadcast_list[index : index + target_uncompact_proofs]) if len(broadcast_list_chunks) == 0: diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index 648b12ee6866..409410914899 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -7,7 +7,7 @@ import traceback from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Optional, cast import anyio from chia_rs import AugSchemeMPL, G1Element, G2Element, MerkleSet @@ -189,7 +189,7 @@ async def tx_request_and_timeout(full_node: FullNode, transaction_id: bytes32, t break if transaction_id not in full_node.full_node_store.peers_with_tx: break - peers_with_tx: Set[bytes32] = full_node.full_node_store.peers_with_tx[transaction_id] + peers_with_tx: set[bytes32] = full_node.full_node_store.peers_with_tx[transaction_id] if len(peers_with_tx) == 0: break peer_id = peers_with_tx.pop() @@ -347,7 +347,7 @@ async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Opt return msg if not request.include_transaction_block: - blocks: List[FullBlock] = [] + blocks: list[FullBlock] = [] for i in range(request.start_height, 
request.end_height + 1): header_hash_i: Optional[bytes32] = self.full_node.blockchain.height_to_hash(uint32(i)) if header_hash_i is None: @@ -365,7 +365,7 @@ async def request_blocks(self, request: full_node_protocol.RequestBlocks) -> Opt full_node_protocol.RespondBlocks(request.start_height, request.end_height, blocks), ) else: - blocks_bytes: List[bytes] = [] + blocks_bytes: list[bytes] = [] for i in range(request.start_height, request.end_height + 1): header_hash_i = self.full_node.blockchain.height_to_hash(uint32(i)) if header_hash_i is None: @@ -640,7 +640,7 @@ async def request_signage_point_or_end_of_sub_slot( self, request: full_node_protocol.RequestSignagePointOrEndOfSubSlot ) -> Optional[Message]: if request.index_from_challenge == 0: - sub_slot: Optional[Tuple[EndOfSubSlotBundle, int, uint128]] = self.full_node.full_node_store.get_sub_slot( + sub_slot: Optional[tuple[EndOfSubSlotBundle, int, uint128]] = self.full_node.full_node_store.get_sub_slot( request.challenge_hash ) if sub_slot is not None: @@ -749,7 +749,7 @@ async def request_mempool_transactions( ) -> Optional[Message]: received_filter = PyBIP158(bytearray(request.filter)) - items: List[SpendBundle] = self.full_node.mempool_manager.get_items_not_in_filter(received_filter) + items: list[SpendBundle] = self.full_node.mempool_manager.get_items_not_in_filter(received_filter) for item in items: transaction = full_node_protocol.RespondTransaction(item) @@ -792,7 +792,7 @@ async def declare_proof_of_space( assert sp_vdfs.cc_vdf is not None cc_challenge_hash = sp_vdfs.cc_vdf.challenge - pos_sub_slot: Optional[Tuple[EndOfSubSlotBundle, int, uint128]] = None + pos_sub_slot: Optional[tuple[EndOfSubSlotBundle, int, uint128]] = None if request.challenge_hash != self.full_node.constants.GENESIS_CHALLENGE: # Checks that the proof of space is a response to a recent challenge and valid SP pos_sub_slot = self.full_node.full_node_store.get_sub_slot(cc_challenge_hash) @@ -812,8 +812,8 @@ async def declare_proof_of_space( # Grab best transactions from Mempool for given tip target aggregate_signature: G2Element = G2Element() block_generator: Optional[BlockGenerator] = None - additions: Optional[List[Coin]] = [] - removals: Optional[List[Coin]] = [] + additions: Optional[list[Coin]] = [] + removals: Optional[list[Coin]] = [] async with self.full_node.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high): peak: Optional[BlockRecord] = self.full_node.blockchain.get_peak() @@ -906,7 +906,7 @@ def get_pool_sig(_1: PoolTarget, _2: Optional[G1Element]) -> Optional[G2Element] return None try: - finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = ( + finished_sub_slots: Optional[list[EndOfSubSlotBundle]] = ( self.full_node.full_node_store.get_finished_sub_slots( self.full_node.blockchain, prev_b, cc_challenge_hash ) @@ -1074,7 +1074,7 @@ async def signed_values( block, which only needs a Proof of Time to be finished. If the signature is valid, we call the unfinished_block routine. 
""" - candidate_tuple: Optional[Tuple[uint32, UnfinishedBlock]] = self.full_node.full_node_store.get_candidate_block( + candidate_tuple: Optional[tuple[uint32, UnfinishedBlock]] = self.full_node.full_node_store.get_candidate_block( farmer_request.quality_string ) @@ -1185,8 +1185,8 @@ async def request_block_header(self, request: wallet_protocol.RequestBlockHeader if block is None: return None - tx_removals: List[bytes32] = [] - tx_additions: List[Coin] = [] + tx_removals: list[bytes32] = [] + tx_additions: list[Coin] = [] if block.transactions_generator is not None: block_generator: Optional[BlockGenerator] = await get_block_generator( @@ -1233,15 +1233,15 @@ async def request_additions(self, request: wallet_protocol.RequestAdditions) -> if self.full_node.blockchain.height_to_hash(request.height) != header_hash: raise ValueError(f"Block {header_hash} no longer in chain, or invalid header_hash") - puzzlehash_coins_map: Dict[bytes32, List[Coin]] = {} + puzzlehash_coins_map: dict[bytes32, list[Coin]] = {} for coin_record in additions: if coin_record.coin.puzzle_hash in puzzlehash_coins_map: puzzlehash_coins_map[coin_record.coin.puzzle_hash].append(coin_record.coin) else: puzzlehash_coins_map[coin_record.coin.puzzle_hash] = [coin_record.coin] - coins_map: List[Tuple[bytes32, List[Coin]]] = [] - proofs_map: List[Tuple[bytes32, bytes, Optional[bytes]]] = [] + coins_map: list[tuple[bytes32, list[Coin]]] = [] + proofs_map: list[tuple[bytes32, bytes, Optional[bytes]]] = [] if request.puzzle_hashes is None: for puzzle_hash, coins in puzzlehash_coins_map.items(): @@ -1250,7 +1250,7 @@ async def request_additions(self, request: wallet_protocol.RequestAdditions) -> else: # Create addition Merkle set # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash - leafs: List[bytes32] = [] + leafs: list[bytes32] = [] for puzzle, coins in puzzlehash_coins_map.items(): leafs.append(puzzle) leafs.append(hash_coin_ids([c.name() for c in coins])) @@ -1295,21 +1295,21 @@ async def request_removals(self, request: wallet_protocol.RequestRemovals) -> Op assert block is not None and block.foliage_transaction_block is not None # Note: this might return bad data if there is a reorg in this time - all_removals: List[CoinRecord] = await self.full_node.coin_store.get_coins_removed_at_height(block.height) + all_removals: list[CoinRecord] = await self.full_node.coin_store.get_coins_removed_at_height(block.height) if self.full_node.blockchain.height_to_hash(block.height) != request.header_hash: raise ValueError(f"Block {block.header_hash} no longer in chain") - all_removals_dict: Dict[bytes32, Coin] = {} + all_removals_dict: dict[bytes32, Coin] = {} for coin_record in all_removals: all_removals_dict[coin_record.coin.name()] = coin_record.coin - coins_map: List[Tuple[bytes32, Optional[Coin]]] = [] - proofs_map: List[Tuple[bytes32, bytes]] = [] + coins_map: list[tuple[bytes32, Optional[Coin]]] = [] + proofs_map: list[tuple[bytes32, bytes]] = [] # If there are no transactions, respond with empty lists if block.transactions_generator is None: - proofs: Optional[List[Tuple[bytes32, bytes]]] + proofs: Optional[list[tuple[bytes32, bytes]]] if request.coin_names is None: proofs = None else: @@ -1321,7 +1321,7 @@ async def request_removals(self, request: wallet_protocol.RequestRemovals) -> Op response = wallet_protocol.RespondRemovals(block.height, block.header_hash, coins_map, None) else: assert block.transactions_generator - leafs: List[bytes32] = [] + leafs: list[bytes32] = [] for removed_name, 
removed_coin in all_removals_dict.items(): leafs.append(removed_name) removal_merkle_set = MerkleSet(leafs) @@ -1431,7 +1431,7 @@ async def request_block_headers(self, request: wallet_protocol.RequestBlockHeade else: height_to_hash = self.full_node.blockchain.height_to_hash - header_hashes: List[bytes32] = [] + header_hashes: list[bytes32] = [] for i in range(request.start_height, request.end_height + 1): header_hash: Optional[bytes32] = height_to_hash(uint32(i)) if header_hash is None: @@ -1442,7 +1442,7 @@ async def request_block_headers(self, request: wallet_protocol.RequestBlockHeade if len(blocks_bytes) != (request.end_height - request.start_height + 1): # +1 because interval is inclusive return make_msg(ProtocolMessageTypes.reject_block_headers, reject) return_filter = request.return_filter - header_blocks_bytes: List[bytes] = [header_block_from_block(memoryview(b), return_filter) for b in blocks_bytes] + header_blocks_bytes: list[bytes] = [header_block_from_block(memoryview(b), return_filter) for b in blocks_bytes] # we're building the RespondHeaderBlocks manually to avoid cost of # dynamic serialization @@ -1467,7 +1467,7 @@ async def request_header_blocks(self, request: wallet_protocol.RequestHeaderBloc ): return None height_to_hash = self.full_node.blockchain.height_to_hash - header_hashes: List[bytes32] = [] + header_hashes: list[bytes32] = [] for i in range(request.start_height, request.end_height + 1): header_hash: Optional[bytes32] = height_to_hash(uint32(i)) if header_hash is None: @@ -1476,7 +1476,7 @@ async def request_header_blocks(self, request: wallet_protocol.RequestHeaderBloc return msg header_hashes.append(header_hash) - blocks: List[FullBlock] = await self.full_node.block_store.get_blocks_by_hash(header_hashes) + blocks: list[FullBlock] = await self.full_node.block_store.get_blocks_by_hash(header_hashes) header_blocks = [] for block in blocks: added_coins_records_coroutine = self.full_node.coin_store.get_coins_added_at_height(block.height) @@ -1585,16 +1585,16 @@ async def register_interest_in_puzzle_hash( # before we send the response # Send all coins with requested puzzle hash that have been created after the specified height - states: Set[CoinState] = await self.full_node.coin_store.get_coin_states_by_puzzle_hashes( + states: set[CoinState] = await self.full_node.coin_store.get_coin_states_by_puzzle_hashes( include_spent_coins=True, puzzle_hashes=puzzle_hashes, min_height=request.min_height, max_items=max_items ) max_items -= len(states) hint_coin_ids = await self.full_node.hint_store.get_coin_ids_multi( - cast(Set[bytes], puzzle_hashes), max_items=max_items + cast(set[bytes], puzzle_hashes), max_items=max_items ) - hint_states: List[CoinState] = [] + hint_states: list[CoinState] = [] if len(hint_coin_ids) > 0: hint_states = await self.full_node.coin_store.get_coin_states_by_ids( include_spent_coins=True, @@ -1637,7 +1637,7 @@ async def register_interest_in_coin( # times, so we can't optimize away such DB lookups (yet) self.full_node.subscriptions.add_coin_subscriptions(peer.peer_node_id, request.coin_ids, max_subscriptions) - states: List[CoinState] = await self.full_node.coin_store.get_coin_states_by_ids( + states: list[CoinState] = await self.full_node.coin_store.get_coin_states_by_ids( include_spent_coins=True, coin_ids=set(request.coin_ids), min_height=request.min_height, max_items=max_items ) @@ -1647,7 +1647,7 @@ async def register_interest_in_coin( @api_request() async def request_children(self, request: wallet_protocol.RequestChildren) -> 
Optional[Message]: - coin_records: List[CoinRecord] = await self.full_node.coin_store.get_coin_records_by_parent_ids( + coin_records: list[CoinRecord] = await self.full_node.coin_store.get_coin_records_by_parent_ids( True, [request.coin_name] ) states = [record.coin_state for record in coin_records] @@ -1693,7 +1693,7 @@ async def request_ses_hashes(self, request: wallet_protocol.RequestSESInfo) -> M @api_request(reply_types=[ProtocolMessageTypes.respond_fee_estimates]) async def request_fee_estimates(self, request: wallet_protocol.RequestFeeEstimates) -> Message: - def get_fee_estimates(est: FeeEstimatorInterface, req_times: List[uint64]) -> List[FeeEstimate]: + def get_fee_estimates(est: FeeEstimatorInterface, req_times: list[uint64]) -> list[FeeEstimate]: now = datetime.now(timezone.utc) utc_time = now.replace(tzinfo=timezone.utc) utc_now = int(utc_time.timestamp()) @@ -1702,7 +1702,7 @@ def get_fee_estimates(est: FeeEstimatorInterface, req_times: List[uint64]) -> Li v1_fee_rates = [fee_rate_v2_to_v1(est) for est in fee_rates] return [FeeEstimate(None, req_ts, fee_rate) for req_ts, fee_rate in zip(req_times, v1_fee_rates)] - fee_estimates: List[FeeEstimate] = get_fee_estimates( + fee_estimates: list[FeeEstimate] = get_fee_estimates( self.full_node.mempool_manager.mempool.fee_estimator, request.time_targets ) response = RespondFeeEstimates(FeeEstimateGroup(error=None, estimates=fee_estimates)) @@ -1901,7 +1901,7 @@ async def request_cost_info(self, _request: wallet_protocol.RequestCostInfo) -> return msg async def mempool_updates_for_puzzle_hashes( - self, peer: WSChiaConnection, puzzle_hashes: Set[bytes32], include_hints: bool + self, peer: WSChiaConnection, puzzle_hashes: set[bytes32], include_hints: bool ) -> None: if Capability.MEMPOOL_UPDATES not in peer.peer_capabilities: return @@ -1913,10 +1913,10 @@ async def mempool_updates_for_puzzle_hashes( self.full_node.mempool_manager.mempool.items_with_puzzle_hashes(puzzle_hashes, include_hints) ) - hinted_coin_ids: Set[bytes32] = set() + hinted_coin_ids: set[bytes32] = set() for batch in to_batches(puzzle_hashes, SQLITE_MAX_VARIABLE_NUMBER): - hints_db: Tuple[bytes, ...] = tuple(batch.entries) + hints_db: tuple[bytes, ...] 
= tuple(batch.entries)
+            hints_db: tuple[bytes, ...]
= tuple(batch.entries) cursor = await conn.execute( f"SELECT coin_id from hints INDEXED BY hint_index " f'WHERE hint IN ({"?," * (len(batch.entries) - 1)}?)', @@ -1939,7 +1939,7 @@ async def mempool_updates_for_puzzle_hashes( f"Sending initial mempool items to peer {peer.peer_node_id} took {total_time:.4f}s", ) - async def mempool_updates_for_coin_ids(self, peer: WSChiaConnection, coin_ids: Set[bytes32]) -> None: + async def mempool_updates_for_coin_ids(self, peer: WSChiaConnection, coin_ids: set[bytes32]) -> None: if Capability.MEMPOOL_UPDATES not in peer.peer_capabilities: return diff --git a/chia/full_node/full_node_store.py b/chia/full_node/full_node_store.py index 2a68bf2e1b3b..c25aa5f26e24 100644 --- a/chia/full_node/full_node_store.py +++ b/chia/full_node/full_node_store.py @@ -4,7 +4,7 @@ import dataclasses import logging import time -from typing import Dict, List, Optional, Set, Tuple +from typing import Optional from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol @@ -33,8 +33,8 @@ @dataclasses.dataclass(frozen=True) class FullNodeStorePeakResult(Streamable): added_eos: Optional[EndOfSubSlotBundle] - new_signage_points: List[Tuple[uint8, SignagePoint]] - new_infusion_points: List[timelord_protocol.NewInfusionPointVDF] + new_signage_points: list[tuple[uint8, SignagePoint]] + new_infusion_points: list[timelord_protocol.NewInfusionPointVDF] @dataclasses.dataclass @@ -49,8 +49,8 @@ class UnfinishedBlockEntry: def find_best_block( - result: Dict[Optional[bytes32], UnfinishedBlockEntry] -) -> Tuple[Optional[bytes32], Optional[UnfinishedBlock]]: + result: dict[Optional[bytes32], UnfinishedBlockEntry] +) -> tuple[Optional[bytes32], Optional[UnfinishedBlock]]: """ Given a collection of UnfinishedBlocks (all with the same reward block hash), return the "best" one. i.e. the one with the smallest foliage hash. @@ -67,7 +67,7 @@ def find_best_block( else: return foliage_hash, entry.unfinished_block - def include_block(item: Tuple[Optional[bytes32], UnfinishedBlockEntry]) -> bool: + def include_block(item: tuple[Optional[bytes32], UnfinishedBlockEntry]) -> bool: foliage_hash, entry = item return foliage_hash is not None and entry.unfinished_block is not None @@ -88,14 +88,14 @@ class FullNodeStore: constants: ConsensusConstants # Blocks which we have created, but don't have plot signatures yet, so not yet "unfinished blocks" - candidate_blocks: Dict[bytes32, Tuple[uint32, UnfinishedBlock]] - candidate_backup_blocks: Dict[bytes32, Tuple[uint32, UnfinishedBlock]] + candidate_blocks: dict[bytes32, tuple[uint32, UnfinishedBlock]] + candidate_backup_blocks: dict[bytes32, tuple[uint32, UnfinishedBlock]] # Block hashes of unfinished blocks that we have seen recently. This is # effectively a Set[bytes32] but in order to evict the oldest items first, # we use a Dict that preserves insertion order, and remove from the # beginning - seen_unfinished_blocks: Dict[bytes32, None] + seen_unfinished_blocks: dict[bytes32, None] # Unfinished blocks, keyed from reward hash # There may be multiple different unfinished blocks with the same partial @@ -107,36 +107,36 @@ class FullNodeStore: # The inner key (the foliage hash) is Optional, where None either means # it's not a transaction block, or it's a block we learned about via the old # protocol, where all we get is the reward block hash. 
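(Illustrative sketch, not from the patch: one plausible way to consult the two-level index described above; lookup_entry and its parameters are hypothetical names, and UnfinishedBlockEntry is the dataclass defined earlier in this file.)

    from typing import Optional

    from chia.types.blockchain_format.sized_bytes import bytes32

    def lookup_entry(
        unfinished_blocks: dict[bytes32, dict[Optional[bytes32], "UnfinishedBlockEntry"]],
        reward_hash: bytes32,
        foliage_hash: Optional[bytes32],
    ) -> Optional["UnfinishedBlockEntry"]:
        # outer key: reward block hash; inner key: foliage hash, where None
        # covers non-transaction blocks and old-protocol submissions
        per_reward = unfinished_blocks.get(reward_hash, {})
        entry = per_reward.get(foliage_hash)
        if entry is None:
            entry = per_reward.get(None)  # fall back to the reward-hash-only entry
        return entry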
- _unfinished_blocks: Dict[bytes32, Dict[Optional[bytes32], UnfinishedBlockEntry]] + _unfinished_blocks: dict[bytes32, dict[Optional[bytes32], UnfinishedBlockEntry]] # Finished slots and sps from the peak's slot onwards # We store all 32 SPs for each slot, starting as 32 Nones and filling them as we go # Also stores the total iters at the end of slot # For the first sub-slot, EndOfSlotBundle is None - finished_sub_slots: List[Tuple[Optional[EndOfSubSlotBundle], List[Optional[SignagePoint]], uint128]] + finished_sub_slots: list[tuple[Optional[EndOfSubSlotBundle], list[Optional[SignagePoint]], uint128]] # These caches maintain objects which depend on infused blocks in the reward chain, that we # might receive before the blocks themselves. The dict keys are the reward chain challenge hashes. # End of slots which depend on infusions that we don't have - future_eos_cache: Dict[bytes32, List[EndOfSubSlotBundle]] + future_eos_cache: dict[bytes32, list[EndOfSubSlotBundle]] # Signage points which depend on infusions that we don't have - future_sp_cache: Dict[bytes32, List[Tuple[uint8, SignagePoint]]] + future_sp_cache: dict[bytes32, list[tuple[uint8, SignagePoint]]] # Infusion point VDFs which depend on infusions that we don't have - future_ip_cache: Dict[bytes32, List[timelord_protocol.NewInfusionPointVDF]] + future_ip_cache: dict[bytes32, list[timelord_protocol.NewInfusionPointVDF]] # This stores the time that each key was added to the future cache, so we can clear old keys - future_cache_key_times: Dict[bytes32, int] + future_cache_key_times: dict[bytes32, int] # These recent caches are for pooling support - recent_signage_points: LRUCache[bytes32, Tuple[SignagePoint, float]] - recent_eos: LRUCache[bytes32, Tuple[EndOfSubSlotBundle, float]] + recent_signage_points: LRUCache[bytes32, tuple[SignagePoint, float]] + recent_eos: LRUCache[bytes32, tuple[EndOfSubSlotBundle, float]] - pending_tx_request: Dict[bytes32, bytes32] # tx_id: peer_id - peers_with_tx: Dict[bytes32, Set[bytes32]] # tx_id: Set[peer_ids} - tx_fetch_tasks: Dict[bytes32, asyncio.Task[None]] # Task id: task + pending_tx_request: dict[bytes32, bytes32] # tx_id: peer_id + peers_with_tx: dict[bytes32, set[bytes32]] # tx_id: Set[peer_ids} + tx_fetch_tasks: dict[bytes32, asyncio.Task[None]] # Task id: task serialized_wp_message: Optional[Message] serialized_wp_message_tip: Optional[bytes32] @@ -166,7 +166,7 @@ def __init__(self, constants: ConsensusConstants): def is_requesting_unfinished_block( self, reward_block_hash: bytes32, foliage_hash: Optional[bytes32] - ) -> Tuple[bool, int]: + ) -> tuple[bool, int]: """ Asks if we are already requesting this specific unfinished block (given the reward block hash and foliage hash). The returned bool is true if we @@ -212,7 +212,7 @@ def add_candidate_block( def get_candidate_block( self, quality_string: bytes32, backup: bool = False - ) -> Optional[Tuple[uint32, UnfinishedBlock]]: + ) -> Optional[tuple[uint32, UnfinishedBlock]]: if backup: return self.candidate_backup_blocks.get(quality_string, None) else: @@ -273,7 +273,7 @@ def get_unfinished_block(self, unfinished_reward_hash: bytes32) -> Optional[Unfi def get_unfinished_block2( self, unfinished_reward_hash: bytes32, unfinished_foliage_hash: Optional[bytes32] - ) -> Tuple[Optional[UnfinishedBlock], int, bool]: + ) -> tuple[Optional[UnfinishedBlock], int, bool]: """ Looks up an UnfinishedBlock by its reward block hash and foliage hash. If the foliage hash is None (e.g. 
it's not a transaction block), we fall @@ -315,8 +315,8 @@ def get_unfinished_block_result( return result.get(unfinished_foliage_hash) # returns all unfinished blocks for the specified height - def get_unfinished_blocks(self, height: uint32) -> List[UnfinishedBlock]: - ret: List[UnfinishedBlock] = [] + def get_unfinished_blocks(self, height: uint32) -> list[UnfinishedBlock]: + ret: list[UnfinishedBlock] = [] for entry in self._unfinished_blocks.values(): for ube in entry.values(): if ube.height == height and ube.unfinished_block is not None: @@ -324,9 +324,9 @@ def get_unfinished_blocks(self, height: uint32) -> List[UnfinishedBlock]: return ret def clear_unfinished_blocks_below(self, height: uint32) -> None: - del_partial: List[bytes32] = [] + del_partial: list[bytes32] = [] for partial_hash, entry in self._unfinished_blocks.items(): - del_foliage: List[Optional[bytes32]] = [] + del_foliage: list[Optional[bytes32]] = [] for foliage_hash, ube in entry.items(): if ube.height < height: del_foliage.append(foliage_hash) @@ -377,12 +377,12 @@ def add_to_future_sp(self, signage_point: SignagePoint, index: uint8) -> None: self.future_sp_cache[signage_point.rc_vdf.challenge].append((index, signage_point)) log.info(f"Don't have rc hash {signage_point.rc_vdf.challenge.hex()}. caching signage point {index}.") - def get_future_ip(self, rc_challenge_hash: bytes32) -> List[timelord_protocol.NewInfusionPointVDF]: + def get_future_ip(self, rc_challenge_hash: bytes32) -> list[timelord_protocol.NewInfusionPointVDF]: return self.future_ip_cache.get(rc_challenge_hash, []) def clear_old_cache_entries(self) -> None: current_time: int = int(time.time()) - remove_keys: List[bytes32] = [] + remove_keys: list[bytes32] = [] for rc_hash, time_added in self.future_cache_key_times.items(): if current_time - time_added > 3600: remove_keys.append(rc_hash) @@ -395,7 +395,7 @@ def clear_old_cache_entries(self) -> None: def clear_slots(self) -> None: self.finished_sub_slots.clear() - def get_sub_slot(self, challenge_hash: bytes32) -> Optional[Tuple[EndOfSubSlotBundle, int, uint128]]: + def get_sub_slot(self, challenge_hash: bytes32) -> Optional[tuple[EndOfSubSlotBundle, int, uint128]]: assert len(self.finished_sub_slots) >= 1 for index, (sub_slot, _, total_iters) in enumerate(self.finished_sub_slots): if sub_slot is not None and sub_slot.challenge_chain.get_hash() == challenge_hash: @@ -414,7 +414,7 @@ def new_finished_sub_slot( next_sub_slot_iters: uint64, next_difficulty: uint64, peak_full_block: Optional[FullBlock], - ) -> Optional[List[timelord_protocol.NewInfusionPointVDF]]: + ) -> Optional[list[timelord_protocol.NewInfusionPointVDF]]: """ Returns false if not added. Returns a list if added. 
The list contains all infusion points that depended on this sub slot
@@ -671,7 +671,7 @@ def new_finished_sub_slot(
new_cc_hash = eos.challenge_chain.get_hash()
self.recent_eos.put(new_cc_hash, (eos, time.time()))
- new_ips: List[timelord_protocol.NewInfusionPointVDF] = []
+ new_ips: list[timelord_protocol.NewInfusionPointVDF] = []
for ip in self.future_ip_cache.get(eos.reward_chain.get_hash(), []):
new_ips.append(ip)
@@ -904,8 +904,8 @@ def new_peak(
self.initialize_genesis_sub_slot()
else:
# This is not the first sub-slot in the chain
- sp_sub_slot_sps: List[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT
- ip_sub_slot_sps: List[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT
+ sp_sub_slot_sps: list[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT
+ ip_sub_slot_sps: list[Optional[SignagePoint]] = [None] * self.constants.NUM_SPS_SUB_SLOT

if fork_block is not None and fork_block.sub_slot_iters != peak.sub_slot_iters:
# If there was a reorg and a difficulty adjustment, just clear all the slots
@@ -921,7 +921,7 @@ def new_peak(
if fork_block is None:
# If this is not a reorg, we still want to remove signage points after the new peak
fork_block = peak
- replaced_sps: List[Optional[SignagePoint]] = [] # index 0 is the end of sub slot
+ replaced_sps: list[Optional[SignagePoint]] = [] # index 0 is the end of sub slot
for i, sp in enumerate(sps):
if (total_iters + i * interval_iters) < fork_block.total_iters:
# Sps before the fork point are still valid
@@ -951,10 +951,10 @@ def new_peak(
self.finished_sub_slots.append((ip_sub_slot, ip_sub_slot_sps, ip_sub_slot_total_iters))

new_eos: Optional[EndOfSubSlotBundle] = None
- new_sps: List[Tuple[uint8, SignagePoint]] = []
- new_ips: List[timelord_protocol.NewInfusionPointVDF] = []
+ new_sps: list[tuple[uint8, SignagePoint]] = []
+ new_ips: list[timelord_protocol.NewInfusionPointVDF] = []

- future_eos: List[EndOfSubSlotBundle] = self.future_eos_cache.get(peak.reward_infusion_new_challenge, []).copy()
+ future_eos: list[EndOfSubSlotBundle] = self.future_eos_cache.get(peak.reward_infusion_new_challenge, []).copy()
for eos in future_eos:
if (
self.new_finished_sub_slot(eos, blocks, peak, next_sub_slot_iters, next_difficulty, peak_full_block)
@@ -963,7 +963,7 @@ def new_peak(
new_eos = eos
break

- future_sps: List[Tuple[uint8, SignagePoint]] = self.future_sp_cache.get(
+ future_sps: list[tuple[uint8, SignagePoint]] = self.future_sp_cache.get(
peak.reward_infusion_new_challenge, []
).copy()
for index, sp in future_sps:
@@ -990,7 +990,7 @@ def get_finished_sub_slots(
block_records: BlockRecordsProtocol,
prev_b: Optional[BlockRecord],
last_challenge_to_add: bytes32,
- ) -> Optional[List[EndOfSubSlotBundle]]:
+ ) -> Optional[list[EndOfSubSlotBundle]]:
"""
Retrieves the EndOfSubSlotBundles that are in the store either:
1. 
From the starting challenge if prev_b is None @@ -1015,7 +1015,7 @@ def get_finished_sub_slots( # No additional slots to add return [] - collected_sub_slots: List[EndOfSubSlotBundle] = [] + collected_sub_slots: list[EndOfSubSlotBundle] = [] found_last_challenge = False found_connecting_challenge = False for sub_slot, sps, total_iters in self.finished_sub_slots[1:]: diff --git a/chia/full_node/hint_management.py b/chia/full_node/hint_management.py index ccb962b41256..810847164fc5 100644 --- a/chia/full_node/hint_management.py +++ b/chia/full_node/hint_management.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Callable, List, Optional, Set, Tuple +from typing import Callable, Optional from chia.consensus.blockchain import StateChangeSummary from chia.types.blockchain_format.sized_bytes import bytes32 @@ -10,16 +10,16 @@ def get_hints_and_subscription_coin_ids( state_change_summary: StateChangeSummary, has_coin_subscription: Callable[[bytes32], bool], has_puzzle_subscription: Callable[[bytes32], bool], -) -> Tuple[List[Tuple[bytes32, bytes]], List[bytes32]]: +) -> tuple[list[tuple[bytes32, bytes]], list[bytes32]]: # Precondition: all hints passed in are max 32 bytes long # Returns the hints that we need to add to the DB, and the coin ids that need to be looked up # Finds the coin IDs that we need to lookup in order to notify wallets of hinted transactions hint: Optional[bytes] - hints_to_add: List[Tuple[bytes32, bytes]] = [] + hints_to_add: list[tuple[bytes32, bytes]] = [] # Goes through additions and removals for each block and flattens to a map and a set - lookup_coin_ids: Set[bytes32] = set() + lookup_coin_ids: set[bytes32] = set() def add_if_coin_subscription(coin_id: bytes32) -> None: if has_coin_subscription(coin_id): diff --git a/chia/full_node/hint_store.py b/chia/full_node/hint_store.py index 9cbc0b26457b..40a427d17000 100644 --- a/chia/full_node/hint_store.py +++ b/chia/full_node/hint_store.py @@ -2,7 +2,6 @@ import dataclasses import logging -from typing import List, Set, Tuple import typing_extensions @@ -32,19 +31,19 @@ async def create(cls, db_wrapper: DBWrapper2) -> HintStore: await conn.execute("CREATE INDEX IF NOT EXISTS hint_index on hints(hint)") return self - async def get_coin_ids(self, hint: bytes, *, max_items: int = 50000) -> List[bytes32]: + async def get_coin_ids(self, hint: bytes, *, max_items: int = 50000) -> list[bytes32]: async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute("SELECT coin_id from hints WHERE hint=? LIMIT ?", (hint, max_items)) rows = await cursor.fetchall() await cursor.close() return [bytes32(row[0]) for row in rows] - async def get_coin_ids_multi(self, hints: Set[bytes], *, max_items: int = 50000) -> List[bytes32]: - coin_ids: List[bytes32] = [] + async def get_coin_ids_multi(self, hints: set[bytes], *, max_items: int = 50000) -> list[bytes32]: + coin_ids: list[bytes32] = [] async with self.db_wrapper.reader_no_transaction() as conn: for batch in to_batches(hints, SQLITE_MAX_VARIABLE_NUMBER): - hints_db: Tuple[bytes, ...] = tuple(batch.entries) + hints_db: tuple[bytes, ...] = tuple(batch.entries) cursor = await conn.execute( f"SELECT coin_id from hints INDEXED BY hint_index " f'WHERE hint IN ({"?," * (len(batch.entries) - 1)}?) 
LIMIT ?', @@ -56,12 +55,12 @@ async def get_coin_ids_multi(self, hints: Set[bytes], *, max_items: int = 50000) return coin_ids - async def get_hints(self, coin_ids: List[bytes32]) -> List[bytes32]: - hints: List[bytes32] = [] + async def get_hints(self, coin_ids: list[bytes32]) -> list[bytes32]: + hints: list[bytes32] = [] async with self.db_wrapper.reader_no_transaction() as conn: for batch in to_batches(coin_ids, SQLITE_MAX_VARIABLE_NUMBER): - coin_ids_db: Tuple[bytes32, ...] = tuple(batch.entries) + coin_ids_db: tuple[bytes32, ...] = tuple(batch.entries) cursor = await conn.execute( f'SELECT hint from hints WHERE coin_id IN ({"?," * (len(batch.entries) - 1)}?)', coin_ids_db, @@ -72,7 +71,7 @@ async def get_hints(self, coin_ids: List[bytes32]) -> List[bytes32]: return hints - async def add_hints(self, coin_hint_list: List[Tuple[bytes32, bytes]]) -> None: + async def add_hints(self, coin_hint_list: list[tuple[bytes32, bytes]]) -> None: if len(coin_hint_list) == 0: return None diff --git a/chia/full_node/mempool.py b/chia/full_node/mempool.py index 0996d0093f65..eee41e63ebc3 100644 --- a/chia/full_node/mempool.py +++ b/chia/full_node/mempool.py @@ -2,11 +2,12 @@ import logging import sqlite3 +from collections.abc import Awaitable, Iterator from dataclasses import dataclass from datetime import datetime from enum import Enum from time import monotonic -from typing import Awaitable, Callable, Dict, Iterator, List, Optional, Set, Tuple +from typing import Callable, Optional from chia_rs import AugSchemeMPL, Coin, G2Element @@ -50,13 +51,13 @@ @dataclass class MempoolRemoveInfo: - items: List[InternalMempoolItem] + items: list[InternalMempoolItem] reason: MempoolRemoveReason @dataclass class MempoolAddInfo: - removals: List[MempoolRemoveInfo] + removals: list[MempoolRemoveInfo] error: Optional[Err] @@ -71,7 +72,7 @@ class Mempool: _db_conn: sqlite3.Connection # it's expensive to serialize and deserialize G2Element, so we keep those in # this separate dictionary - _items: Dict[bytes32, InternalMempoolItem] + _items: dict[bytes32, InternalMempoolItem] # the most recent block height and timestamp that we know of _block_height: uint32 @@ -164,18 +165,18 @@ def all_items(self) -> Iterator[MempoolItem]: for row in cursor: yield self._row_to_item(row) - def all_item_ids(self) -> List[bytes32]: + def all_item_ids(self) -> list[bytes32]: with self._db_conn: cursor = self._db_conn.execute("SELECT name FROM tx") return [bytes32(row[0]) for row in cursor] - def items_with_coin_ids(self, coin_ids: Set[bytes32]) -> List[bytes32]: + def items_with_coin_ids(self, coin_ids: set[bytes32]) -> list[bytes32]: """ Returns a list of transaction ids that spend or create any coins with the provided coin ids. This iterates over the internal items instead of using a query. """ - transaction_ids: List[bytes32] = [] + transaction_ids: list[bytes32] = [] for transaction_id, item in self._items.items(): conds = item.conds @@ -197,14 +198,14 @@ def items_with_coin_ids(self, coin_ids: Set[bytes32]) -> List[bytes32]: return transaction_ids - def items_with_puzzle_hashes(self, puzzle_hashes: Set[bytes32], include_hints: bool) -> List[bytes32]: + def items_with_puzzle_hashes(self, puzzle_hashes: set[bytes32], include_hints: bool) -> list[bytes32]: """ Returns a list of transaction ids that spend or create any coins with the provided puzzle hashes (or hints, if enabled). This iterates over the internal items instead of using a query. 
""" - transaction_ids: List[bytes32] = [] + transaction_ids: list[bytes32] = [] for transaction_id, item in self._items.items(): conds = item.conds @@ -261,8 +262,8 @@ def get_items_by_coin_id(self, spent_coin_id: bytes32) -> Iterator[MempoolItem]: for row in cursor: yield self._row_to_item(row) - def get_items_by_coin_ids(self, spent_coin_ids: List[bytes32]) -> List[MempoolItem]: - items: List[MempoolItem] = [] + def get_items_by_coin_ids(self, spent_coin_ids: list[bytes32]) -> list[MempoolItem]: + items: list[MempoolItem] = [] for batch in to_batches(spent_coin_ids, SQLITE_MAX_VARIABLE_NUMBER): args = ",".join(["?"] * len(batch.entries)) cursor = self._db_conn.execute( @@ -318,14 +319,14 @@ def new_tx_block(self, block_height: uint32, timestamp: uint64) -> MempoolRemove return self.remove_from_pool(to_remove, MempoolRemoveReason.EXPIRED) - def remove_from_pool(self, items: List[bytes32], reason: MempoolRemoveReason) -> MempoolRemoveInfo: + def remove_from_pool(self, items: list[bytes32], reason: MempoolRemoveReason) -> MempoolRemoveInfo: """ Removes an item from the mempool. """ if items == []: return MempoolRemoveInfo([], reason) - removed_items: List[MempoolItemInfo] = [] + removed_items: list[MempoolItemInfo] = [] if reason != MempoolRemoveReason.BLOCK_INCLUSION: for batch in to_batches(items, SQLITE_MAX_VARIABLE_NUMBER): args = ",".join(["?"] * len(batch.entries)) @@ -375,7 +376,7 @@ def add_to_pool(self, item: MempoolItem) -> MempoolAddInfo: assert item.conds is not None assert item.cost <= self.mempool_info.max_block_clvm_cost - removals: List[MempoolRemoveInfo] = [] + removals: list[MempoolRemoveInfo] = [] # we have certain limits on transactions that will expire soon # (in the next 15 minutes) @@ -399,7 +400,7 @@ def add_to_pool(self, item: MempoolItem) -> MempoolAddInfo: """, (time_cutoff, block_cutoff), ) - to_remove: List[bytes32] = [] + to_remove: list[bytes32] = [] for row in cursor: name, fee_per_cost, cumulative_cost = row @@ -476,11 +477,11 @@ async def create_bundle_from_mempool_items( get_unspent_lineage_info_for_puzzle_hash: Callable[[bytes32], Awaitable[Optional[UnspentLineageInfo]]], constants: ConsensusConstants, height: uint32, - ) -> Optional[Tuple[SpendBundle, List[Coin]]]: + ) -> Optional[tuple[SpendBundle, list[Coin]]]: cost_sum = 0 # Checks that total cost does not exceed block maximum fee_sum = 0 # Checks that total fees don't exceed 64 bits processed_spend_bundles = 0 - additions: List[Coin] = [] + additions: list[Coin] = [] # This contains: # 1. 
A map of coin ID to a coin spend solution and its isolated cost # We reconstruct it for every bundle we create from mempool items because we @@ -490,8 +491,8 @@ async def create_bundle_from_mempool_items( # recent unspent singleton data, to allow chaining fast forward # singleton spends eligible_coin_spends = EligibleCoinSpends() - coin_spends: List[CoinSpend] = [] - sigs: List[G2Element] = [] + coin_spends: list[CoinSpend] = [] + sigs: list[G2Element] = [] log.info(f"Starting to make block, max cost: {self.mempool_info.max_block_clvm_cost}") bundle_creation_start = monotonic() cursor = self._db_conn.execute("SELECT name, fee FROM tx ORDER BY fee_per_cost DESC, seq ASC") diff --git a/chia/full_node/mempool_check_conditions.py b/chia/full_node/mempool_check_conditions.py index 57b2f8e5fb63..fd37f0b9f158 100644 --- a/chia/full_node/mempool_check_conditions.py +++ b/chia/full_node/mempool_check_conditions.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Dict, List, Optional +from typing import Optional from chia_rs import DONT_VALIDATE_SIGNATURE, MEMPOOL_MODE, G2Element, get_flags_for_height_and_constants from chia_rs import get_puzzle_and_solution_for_coin2 as get_puzzle_and_solution_for_coin_rust @@ -76,7 +76,7 @@ def get_puzzle_and_solution_for_coin( raise ValueError(f"Failed to get puzzle and solution for coin {coin}, error: {e}") from e -def get_spends_for_block(generator: BlockGenerator, height: int, constants: ConsensusConstants) -> List[CoinSpend]: +def get_spends_for_block(generator: BlockGenerator, height: int, constants: ConsensusConstants) -> list[CoinSpend]: args = bytearray(b"\xff") args += bytes(DESERIALIZE_MOD) args += b"\xff" @@ -90,7 +90,7 @@ def get_spends_for_block(generator: BlockGenerator, height: int, constants: Cons get_flags_for_height_and_constants(height, constants), ) - spends: List[CoinSpend] = [] + spends: list[CoinSpend] = [] for spend in Program.to(ret).first().as_iter(): parent, puzzle, amount, solution = spend.as_iter() @@ -103,7 +103,7 @@ def get_spends_for_block(generator: BlockGenerator, height: int, constants: Cons def get_spends_for_block_with_conditions( generator: BlockGenerator, height: int, constants: ConsensusConstants -) -> List[CoinSpendWithConditions]: +) -> list[CoinSpendWithConditions]: args = bytearray(b"\xff") args += bytes(DESERIALIZE_MOD) args += b"\xff" @@ -119,7 +119,7 @@ def get_spends_for_block_with_conditions( flags, ) - spends: List[CoinSpendWithConditions] = [] + spends: list[CoinSpendWithConditions] = [] for spend in Program.to(ret).first().as_iter(): parent, puzzle, amount, solution = spend.as_iter() @@ -133,7 +133,7 @@ def get_spends_for_block_with_conditions( def mempool_check_time_locks( - removal_coin_records: Dict[bytes32, CoinRecord], + removal_coin_records: dict[bytes32, CoinRecord], bundle_conds: SpendBundleConditions, prev_transaction_block_height: uint32, timestamp: uint64, diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 51af98ffa6e5..093eb4108a5b 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -3,9 +3,10 @@ import asyncio import logging import time +from collections.abc import Awaitable, Collection from concurrent.futures import Executor, ThreadPoolExecutor from dataclasses import dataclass -from typing import Awaitable, Callable, Collection, Dict, List, Optional, Set, Tuple, TypeVar +from typing import Callable, Optional, TypeVar from chia_rs import ELIGIBLE_FOR_DEDUP, ELIGIBLE_FOR_FF, BLSCache, 
supports_fast_forward, validate_clvm_and_signature from chiabip158 import PyBIP158 @@ -51,7 +52,7 @@ class TimelockConditions: def compute_assert_height( - removal_coin_records: Dict[bytes32, CoinRecord], + removal_coin_records: dict[bytes32, CoinRecord], conds: SpendBundleConditions, ) -> TimelockConditions: """ @@ -102,14 +103,14 @@ def compute_assert_height( class SpendBundleAddInfo: cost: Optional[uint64] status: MempoolInclusionStatus - removals: List[MempoolRemoveInfo] + removals: list[MempoolRemoveInfo] error: Optional[Err] @dataclass class NewPeakInfo: - items: List[NewPeakItem] - removals: List[MempoolRemoveInfo] + items: list[NewPeakItem] + removals: list[MempoolRemoveInfo] @dataclass @@ -127,8 +128,8 @@ class NewPeakItem: class MempoolManager: pool: Executor constants: ConsensusConstants - seen_bundle_hashes: Dict[bytes32, bytes32] - get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]] + seen_bundle_hashes: dict[bytes32, bytes32] + get_coin_records: Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]] nonzero_fee_minimum_fpc: int mempool_max_total_cost: int # a cache of MempoolItems that conflict with existing items in the pool @@ -144,7 +145,7 @@ class MempoolManager: def __init__( self, - get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]], + get_coin_records: Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]], consensus_constants: ConsensusConstants, *, single_threaded: bool = False, @@ -153,7 +154,7 @@ def __init__( self.constants: ConsensusConstants = consensus_constants # Keep track of seen spend_bundles - self.seen_bundle_hashes: Dict[bytes32, bytes32] = {} + self.seen_bundle_hashes: dict[bytes32, bytes32] = {} self.get_coin_records = get_coin_records @@ -205,7 +206,7 @@ async def create_bundle_from_mempool( last_tb_header_hash: bytes32, get_unspent_lineage_info_for_puzzle_hash: Callable[[bytes32], Awaitable[Optional[UnspentLineageInfo]]], item_inclusion_filter: Optional[Callable[[bytes32], bool]] = None, - ) -> Optional[Tuple[SpendBundle, List[Coin]]]: + ) -> Optional[tuple[SpendBundle, list[Coin]]]: """ Returns aggregated spendbundle that can be used for creating new block, additions and removals in that spend_bundle @@ -224,7 +225,7 @@ def always(bundle_name: bytes32) -> bool: ) def get_filter(self) -> bytes: - all_transactions: Set[bytes32] = set() + all_transactions: set[bytes32] = set() byte_array_list = [] for key in self.mempool.all_item_ids(): if key not in all_transactions: @@ -321,7 +322,7 @@ async def add_spend_bundle( conds: SpendBundleConditions, spend_name: bytes32, first_added_height: uint32, - get_coin_records: Optional[Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]]] = None, + get_coin_records: Optional[Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]]] = None, ) -> SpendBundleAddInfo: """ Validates and adds to mempool a new_spend with the given NPCResult, and spend_name, and the current mempool. @@ -383,8 +384,8 @@ async def validate_spend_bundle( conds: SpendBundleConditions, spend_name: bytes32, first_added_height: uint32, - get_coin_records: Callable[[Collection[bytes32]], Awaitable[List[CoinRecord]]], - ) -> Tuple[Optional[Err], Optional[MempoolItem], List[bytes32]]: + get_coin_records: Callable[[Collection[bytes32]], Awaitable[list[CoinRecord]]], + ) -> tuple[Optional[Err], Optional[MempoolItem], list[bytes32]]: """ Validates new_spend with the given NPCResult, and spend_name, and the current mempool. 
The mempool should be locked during this call (blockchain lock). @@ -408,12 +409,12 @@ async def validate_spend_bundle( cost = conds.cost - removal_names: Set[bytes32] = set() - additions_dict: Dict[bytes32, Coin] = {} + removal_names: set[bytes32] = set() + additions_dict: dict[bytes32, Coin] = {} addition_amount: int = 0 # Map of coin ID to eligibility information - eligibility_and_additions: Dict[bytes32, EligibilityAndAdditions] = {} - non_eligible_coin_ids: List[bytes32] = [] + eligibility_and_additions: dict[bytes32, EligibilityAndAdditions] = {} + non_eligible_coin_ids: list[bytes32] = [] for spend in conds.spends: coin_id = bytes32(spend.coin_id) removal_names.add(coin_id) @@ -430,9 +431,9 @@ async def validate_spend_bundle( spend_additions=spend_additions, is_eligible_for_ff=is_eligible_for_ff, ) - removal_names_from_coin_spends: Set[bytes32] = set() - fast_forward_coin_ids: Set[bytes32] = set() - bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = {} + removal_names_from_coin_spends: set[bytes32] = set() + fast_forward_coin_ids: set[bytes32] = set() + bundle_coin_spends: dict[bytes32, BundleCoinSpend] = {} for coin_spend in new_spend.coin_spends: coin_id = coin_spend.coin.name() removal_names_from_coin_spends.add(coin_id) @@ -457,7 +458,7 @@ async def validate_spend_bundle( # If you reach here it's probably because your program reveal doesn't match the coin's puzzle hash return Err.INVALID_SPEND_BUNDLE, None, [] - removal_record_dict: Dict[bytes32, CoinRecord] = {} + removal_record_dict: dict[bytes32, CoinRecord] = {} removal_amount: int = 0 removal_records = await get_coin_records(removal_names) for record in removal_records: @@ -587,10 +588,10 @@ async def validate_spend_bundle( def check_removals( self, - non_eligible_coin_ids: List[bytes32], - removals: Dict[bytes32, CoinRecord], - fast_forward_coin_ids: Set[bytes32], - ) -> Tuple[Optional[Err], List[MempoolItem]]: + non_eligible_coin_ids: list[bytes32], + removals: dict[bytes32, CoinRecord], + fast_forward_coin_ids: set[bytes32], + ) -> tuple[Optional[Err], list[MempoolItem]]: """ This function checks for double spends, unknown spends and conflicting transactions in mempool. Returns Error (if any), the set of existing MempoolItems with conflicting spends (if any). @@ -636,7 +637,7 @@ def get_mempool_item(self, bundle_hash: bytes32, include_pending: bool = False) return item async def new_peak( - self, new_peak: Optional[BlockRecordProtocol], spent_coins: Optional[List[bytes32]] + self, new_peak: Optional[BlockRecordProtocol], spent_coins: Optional[list[bytes32]] ) -> NewPeakInfo: """ Called when a new peak is available, we try to recreate a mempool for the new tip. @@ -655,10 +656,10 @@ async def new_peak( return NewPeakInfo([], []) assert new_peak.timestamp is not None self.fee_estimator.new_block_height(new_peak.height) - included_items: List[MempoolItemInfo] = [] + included_items: list[MempoolItemInfo] = [] expired = self.mempool.new_tx_block(new_peak.height, new_peak.timestamp) - mempool_item_removals: List[MempoolRemoveInfo] = [expired] + mempool_item_removals: list[MempoolRemoveInfo] = [expired] use_optimization: bool = self.peak is not None and new_peak.prev_transaction_block_hash == self.peak.header_hash self.peak = new_peak @@ -669,7 +670,7 @@ async def new_peak( # when looking up transactions by all coin IDs, we're likely to # find the same transaction multiple times. 
We put them in a set # to deduplicate - spendbundle_ids_to_remove: Set[bytes32] = set() + spendbundle_ids_to_remove: set[bytes32] = set() for spend in spent_coins: items = self.mempool.get_items_by_coin_id(spend) for item in items: @@ -692,9 +693,9 @@ async def new_peak( # in order to make this a bit quicker, we look-up all the spends in # a single query, rather than one at a time. - coin_records: Dict[bytes32, CoinRecord] = {} + coin_records: dict[bytes32, CoinRecord] = {} - removals: Set[bytes32] = set() + removals: set[bytes32] = set() for item in old_pool.all_items(): for s in item.spend_bundle.coin_spends: removals.add(s.coin.name()) @@ -703,8 +704,8 @@ async def new_peak( name = record.coin.name() coin_records[name] = record - async def local_get_coin_records(names: Collection[bytes32]) -> List[CoinRecord]: - ret: List[CoinRecord] = [] + async def local_get_coin_records(names: Collection[bytes32]) -> list[CoinRecord]: + ret: list[CoinRecord] = [] for name in names: r = coin_records.get(name) if r is not None: @@ -751,8 +752,8 @@ async def local_get_coin_records(names: Collection[bytes32]) -> List[CoinRecord] self.mempool.fee_estimator.new_block(FeeBlockInfo(new_peak.height, included_items)) return NewPeakInfo(txs_added, mempool_item_removals) - def get_items_not_in_filter(self, mempool_filter: PyBIP158, limit: int = 100) -> List[SpendBundle]: - items: List[SpendBundle] = [] + def get_items_not_in_filter(self, mempool_filter: PyBIP158, limit: int = 100) -> list[SpendBundle]: + items: list[SpendBundle] = [] assert limit > 0 @@ -779,8 +780,8 @@ def optional_max(a: Optional[T], b: Optional[T]) -> Optional[T]: def can_replace( - conflicting_items: List[MempoolItem], - removal_names: Set[bytes32], + conflicting_items: list[MempoolItem], + removal_names: set[bytes32], new_item: MempoolItem, ) -> bool: """ diff --git a/chia/full_node/pending_tx_cache.py b/chia/full_node/pending_tx_cache.py index f850052aa599..e9f54b7c01bb 100644 --- a/chia/full_node/pending_tx_cache.py +++ b/chia/full_node/pending_tx_cache.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Dict, Optional +from typing import Optional from sortedcontainers import SortedDict @@ -15,7 +15,7 @@ class ConflictTxCache: _cache_max_total_cost: int _cache_max_size: int = 1000 _cache_cost: int = field(default=0, init=False) - _txs: Dict[bytes32, MempoolItem] = field(default_factory=dict, init=False) + _txs: dict[bytes32, MempoolItem] = field(default_factory=dict, init=False) def get(self, bundle_name: bytes32) -> Optional[MempoolItem]: return self._txs.get(bundle_name, None) @@ -38,7 +38,7 @@ def add(self, item: MempoolItem) -> None: self._cache_cost -= self._txs[first_in].cost self._txs.pop(first_in) - def drain(self) -> Dict[bytes32, MempoolItem]: + def drain(self) -> dict[bytes32, MempoolItem]: ret = self._txs self._txs = {} self._cache_cost = 0 @@ -53,8 +53,8 @@ class PendingTxCache: _cache_max_total_cost: int _cache_max_size: int = 3000 _cache_cost: int = field(default=0, init=False) - _txs: Dict[bytes32, MempoolItem] = field(default_factory=dict, init=False) - _by_height: SortedDict[uint32, Dict[bytes32, MempoolItem]] = field(default_factory=SortedDict, init=False) + _txs: dict[bytes32, MempoolItem] = field(default_factory=dict, init=False) + _by_height: SortedDict[uint32, dict[bytes32, MempoolItem]] = field(default_factory=SortedDict, init=False) def get(self, bundle_name: bytes32) -> Optional[MempoolItem]: return self._txs.get(bundle_name, None) @@ -89,8 +89,8 @@ 
def add(self, item: MempoolItem) -> None: if to_evict[1] == {}: self._by_height.popitem() - def drain(self, up_to_height: uint32) -> Dict[bytes32, MempoolItem]: - ret: Dict[bytes32, MempoolItem] = {} + def drain(self, up_to_height: uint32) -> dict[bytes32, MempoolItem]: + ret: dict[bytes32, MempoolItem] = {} if self._txs == {}: return ret diff --git a/chia/full_node/subscriptions.py b/chia/full_node/subscriptions.py index 46db31bb177c..0128a5ea1400 100644 --- a/chia/full_node/subscriptions.py +++ b/chia/full_node/subscriptions.py @@ -2,7 +2,6 @@ import logging from dataclasses import dataclass, field -from typing import Dict, List, Set from chia_rs import Coin @@ -15,8 +14,8 @@ @dataclass(frozen=True) class SubscriptionSet: - _subscriptions_for_peer: Dict[bytes32, Set[bytes32]] = field(default_factory=dict, init=False) - _peers_for_subscription: Dict[bytes32, Set[bytes32]] = field(default_factory=dict, init=False) + _subscriptions_for_peer: dict[bytes32, set[bytes32]] = field(default_factory=dict, init=False) + _peers_for_subscription: dict[bytes32, set[bytes32]] = field(default_factory=dict, init=False) def add_subscription(self, peer_id: bytes32, item: bytes32) -> bool: peers = self._peers_for_subscription.setdefault(item, set()) @@ -61,10 +60,10 @@ def remove_peer(self, peer_id: bytes32) -> None: if len(self._peers_for_subscription[item]) == 0: self._peers_for_subscription.pop(item) - def subscriptions(self, peer_id: bytes32) -> Set[bytes32]: + def subscriptions(self, peer_id: bytes32) -> set[bytes32]: return self._subscriptions_for_peer.get(peer_id, set()) - def peers(self, item: bytes32) -> Set[bytes32]: + def peers(self, item: bytes32) -> set[bytes32]: return self._peers_for_subscription.get(item, set()) def total_count(self) -> int: @@ -87,15 +86,15 @@ def peer_subscription_count(self, peer_id: bytes32) -> int: coin_subscriptions = self._coin_subscriptions.count_subscriptions(peer_id) return puzzle_subscriptions + coin_subscriptions - def add_puzzle_subscriptions(self, peer_id: bytes32, puzzle_hashes: List[bytes32], max_items: int) -> Set[bytes32]: + def add_puzzle_subscriptions(self, peer_id: bytes32, puzzle_hashes: list[bytes32], max_items: int) -> set[bytes32]: """ Adds subscriptions until max_items is reached. Filters out duplicates and returns all additions. """ subscription_count = self.peer_subscription_count(peer_id) - added: Set[bytes32] = set() + added: set[bytes32] = set() - def limit_reached() -> Set[bytes32]: + def limit_reached() -> set[bytes32]: log.info( "Peer %s attempted to exceed the subscription limit while adding puzzle subscriptions.", peer_id, @@ -121,15 +120,15 @@ def limit_reached() -> Set[bytes32]: return added - def add_coin_subscriptions(self, peer_id: bytes32, coin_ids: List[bytes32], max_items: int) -> Set[bytes32]: + def add_coin_subscriptions(self, peer_id: bytes32, coin_ids: list[bytes32], max_items: int) -> set[bytes32]: """ Adds subscriptions until max_items is reached. Filters out duplicates and returns all additions. 
""" subscription_count = self.peer_subscription_count(peer_id) - added: Set[bytes32] = set() + added: set[bytes32] = set() - def limit_reached() -> Set[bytes32]: + def limit_reached() -> set[bytes32]: log.info( "Peer %s attempted to exceed the subscription limit while adding coin subscriptions.", peer_id, @@ -155,12 +154,12 @@ def limit_reached() -> Set[bytes32]: return added - def remove_puzzle_subscriptions(self, peer_id: bytes32, puzzle_hashes: List[bytes32]) -> Set[bytes32]: + def remove_puzzle_subscriptions(self, peer_id: bytes32, puzzle_hashes: list[bytes32]) -> set[bytes32]: """ Removes subscriptions. Filters out duplicates and returns all removals. """ - removed: Set[bytes32] = set() + removed: set[bytes32] = set() for puzzle_hash in puzzle_hashes: if not self._puzzle_subscriptions.remove_subscription(peer_id, puzzle_hash): @@ -170,12 +169,12 @@ def remove_puzzle_subscriptions(self, peer_id: bytes32, puzzle_hashes: List[byte return removed - def remove_coin_subscriptions(self, peer_id: bytes32, coin_ids: List[bytes32]) -> Set[bytes32]: + def remove_coin_subscriptions(self, peer_id: bytes32, coin_ids: list[bytes32]) -> set[bytes32]: """ Removes subscriptions. Filters out duplicates and returns all removals. """ - removed: Set[bytes32] = set() + removed: set[bytes32] = set() for coin_id in coin_ids: if not self._coin_subscriptions.remove_subscription(peer_id, coin_id): @@ -195,16 +194,16 @@ def remove_peer(self, peer_id: bytes32) -> None: self._puzzle_subscriptions.remove_peer(peer_id) self._coin_subscriptions.remove_peer(peer_id) - def coin_subscriptions(self, peer_id: bytes32) -> Set[bytes32]: + def coin_subscriptions(self, peer_id: bytes32) -> set[bytes32]: return self._coin_subscriptions.subscriptions(peer_id) - def puzzle_subscriptions(self, peer_id: bytes32) -> Set[bytes32]: + def puzzle_subscriptions(self, peer_id: bytes32) -> set[bytes32]: return self._puzzle_subscriptions.subscriptions(peer_id) - def peers_for_coin_id(self, coin_id: bytes32) -> Set[bytes32]: + def peers_for_coin_id(self, coin_id: bytes32) -> set[bytes32]: return self._coin_subscriptions.peers(coin_id) - def peers_for_puzzle_hash(self, puzzle_hash: bytes32) -> Set[bytes32]: + def peers_for_puzzle_hash(self, puzzle_hash: bytes32) -> set[bytes32]: return self._puzzle_subscriptions.peers(puzzle_hash) def coin_subscription_count(self) -> int: @@ -215,16 +214,16 @@ def puzzle_subscription_count(self) -> int: def peers_for_spend_bundle( - peer_subscriptions: PeerSubscriptions, conds: SpendBundleConditions, hints_for_removals: Set[bytes32] -) -> Set[bytes32]: + peer_subscriptions: PeerSubscriptions, conds: SpendBundleConditions, hints_for_removals: set[bytes32] +) -> set[bytes32]: """ Returns a list of peer ids that are subscribed to any of the created or spent coins, puzzle hashes, or hints in the spend bundle. To avoid repeated lookups, `hints_for_removals` should be a set of all puzzle hashes that are being removed. 
""" - coin_ids: Set[bytes32] = set() - puzzle_hashes: Set[bytes32] = hints_for_removals.copy() + coin_ids: set[bytes32] = set() + puzzle_hashes: set[bytes32] = hints_for_removals.copy() for spend in conds.spends: coin_ids.add(bytes32(spend.coin_id)) @@ -237,7 +236,7 @@ def peers_for_spend_bundle( if memo is not None and len(memo) == 32: puzzle_hashes.add(bytes32(memo)) - peers: Set[bytes32] = set() + peers: set[bytes32] = set() for coin_id in coin_ids: peers |= peer_subscriptions.peers_for_coin_id(coin_id) diff --git a/chia/full_node/sync_store.py b/chia/full_node/sync_store.py index 44fff8421afd..400a2e75fc5a 100644 --- a/chia/full_node/sync_store.py +++ b/chia/full_node/sync_store.py @@ -3,9 +3,10 @@ import asyncio import collections import logging +from collections import OrderedDict from collections import OrderedDict as orderedDict from dataclasses import dataclass, field -from typing import Dict, List, Optional, OrderedDict, Set +from typing import Optional import typing_extensions @@ -29,14 +30,14 @@ class SyncStore: sync_mode: bool = False long_sync: bool = False # Header hash : peer node id - peak_to_peer: OrderedDict[bytes32, Set[bytes32]] = field(default_factory=orderedDict) + peak_to_peer: OrderedDict[bytes32, set[bytes32]] = field(default_factory=orderedDict) # peer node id : Peak - peer_to_peak: Dict[bytes32, Peak] = field(default_factory=dict) + peer_to_peak: dict[bytes32, Peak] = field(default_factory=dict) # Peak we are syncing towards target_peak: Optional[Peak] = None peers_changed: asyncio.Event = field(default_factory=asyncio.Event) # Set of nodes which we are batch syncing from - batch_syncing: Set[bytes32] = field(default_factory=set) + batch_syncing: set[bytes32] = field(default_factory=set) # Set of nodes which we are backtrack syncing from, and how many threads _backtrack_syncing: collections.defaultdict[bytes32, int] = field( default_factory=lambda: collections.defaultdict(int), @@ -79,19 +80,19 @@ def peer_has_block( if new_peak: self.peer_to_peak[peer_id] = Peak(header_hash, height, weight) - def get_peers_that_have_peak(self, header_hashes: List[bytes32]) -> Set[bytes32]: + def get_peers_that_have_peak(self, header_hashes: list[bytes32]) -> set[bytes32]: """ Returns: peer ids of peers that have at least one of the header hashes. """ - node_ids: Set[bytes32] = set() + node_ids: set[bytes32] = set() for header_hash in header_hashes: if header_hash in self.peak_to_peer: for node_id in self.peak_to_peer[header_hash]: node_ids.add(node_id) return node_ids - def get_peak_of_each_peer(self) -> Dict[bytes32, Peak]: + def get_peak_of_each_peer(self) -> dict[bytes32, Peak]: """ Returns: dictionary of peer id to peak information. 
""" diff --git a/chia/full_node/tx_processing_queue.py b/chia/full_node/tx_processing_queue.py index cbd7610ddebc..6a3b2710515a 100644 --- a/chia/full_node/tx_processing_queue.py +++ b/chia/full_node/tx_processing_queue.py @@ -4,7 +4,7 @@ import logging from dataclasses import dataclass from queue import SimpleQueue -from typing import Dict, List, Optional +from typing import Optional from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.transaction_queue_entry import TransactionQueueEntry @@ -25,8 +25,8 @@ class TransactionQueue: _list_cursor: int # this is which index _queue_length: asyncio.Semaphore - _index_to_peer_map: List[bytes32] - _queue_dict: Dict[bytes32, SimpleQueue[TransactionQueueEntry]] + _index_to_peer_map: list[bytes32] + _queue_dict: dict[bytes32, SimpleQueue[TransactionQueueEntry]] _high_priority_queue: SimpleQueue[TransactionQueueEntry] peer_size_limit: int log: logging.Logger diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index c5fd2b95ad03..f94309712731 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@ -8,7 +8,7 @@ import tempfile from concurrent.futures.process import ProcessPoolExecutor from multiprocessing.context import BaseContext -from typing import IO, Dict, List, Optional, Tuple +from typing import IO, Optional from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord @@ -92,8 +92,8 @@ async def get_proof_of_weight(self, tip: bytes32) -> Optional[WeightProof]: self.tip = tip return wp - def get_sub_epoch_data(self, tip_height: uint32, summary_heights: List[uint32]) -> List[SubEpochData]: - sub_epoch_data: List[SubEpochData] = [] + def get_sub_epoch_data(self, tip_height: uint32, summary_heights: list[uint32]) -> list[SubEpochData]: + sub_epoch_data: list[SubEpochData] = [] for sub_epoch_n, ses_height in enumerate(summary_heights): if ses_height > tip_height: break @@ -107,7 +107,7 @@ async def _create_proof_of_weight(self, tip: bytes32) -> Optional[WeightProof]: Creates a weight proof object """ assert self.blockchain is not None - sub_epoch_segments: List[SubEpochChallengeSegment] = [] + sub_epoch_segments: list[SubEpochChallengeSegment] = [] tip_rec = self.blockchain.try_block_record(tip) if tip_rec is None: log.error("failed not tip in cache") @@ -164,7 +164,7 @@ async def _create_proof_of_weight(self, tip: bytes32) -> Optional[WeightProof]: log.debug(f"sub_epochs: {len(sub_epoch_data)}") return WeightProof(sub_epoch_data, sub_epoch_segments, recent_chain) - def get_seed_for_proof(self, summary_heights: List[uint32], tip_height: uint32) -> bytes32: + def get_seed_for_proof(self, summary_heights: list[uint32], tip_height: uint32) -> bytes32: count = 0 ses = None for sub_epoch_n, ses_height in enumerate(reversed(summary_heights)): @@ -177,8 +177,8 @@ def get_seed_for_proof(self, summary_heights: List[uint32], tip_height: uint32) seed = ses.get_hash() return seed - async def _get_recent_chain(self, tip_height: uint32) -> Optional[List[HeaderBlock]]: - recent_chain: List[HeaderBlock] = [] + async def _get_recent_chain(self, tip_height: uint32) -> Optional[list[HeaderBlock]]: + recent_chain: list[HeaderBlock] = [] ses_heights = self.blockchain.get_ses_heights() min_height = 0 count_ses = 0 @@ -288,8 +288,8 @@ async def __create_persist_segment( async def __create_sub_epoch_segments( self, ses_block: BlockRecord, se_start: BlockRecord, sub_epoch_n: uint32 - ) -> Optional[List[SubEpochChallengeSegment]]: 
- segments: List[SubEpochChallengeSegment] = [] + ) -> Optional[list[SubEpochChallengeSegment]]: + segments: list[SubEpochChallengeSegment] = [] start_height = await self.get_prev_two_slots_height(se_start) blocks = await self.blockchain.get_block_records_in_range( @@ -345,12 +345,12 @@ async def _create_challenge_segment( self, header_block: HeaderBlock, sub_epoch_n: uint32, - header_blocks: Dict[bytes32, HeaderBlock], - blocks: Dict[bytes32, BlockRecord], + header_blocks: dict[bytes32, HeaderBlock], + blocks: dict[bytes32, BlockRecord], first_segment_in_sub_epoch: bool, - ) -> Tuple[Optional[SubEpochChallengeSegment], uint32]: + ) -> tuple[Optional[SubEpochChallengeSegment], uint32]: assert self.blockchain is not None - sub_slots: List[SubSlotData] = [] + sub_slots: list[SubSlotData] = [] log.debug(f"create challenge segment block {header_block.header_hash} block height {header_block.height} ") # VDFs from sub slots before challenge block first_sub_slots, first_rc_end_of_slot_vdf = await self.__first_sub_slot_vdfs( @@ -392,10 +392,10 @@ async def _create_challenge_segment( async def __first_sub_slot_vdfs( self, header_block: HeaderBlock, - header_blocks: Dict[bytes32, HeaderBlock], - blocks: Dict[bytes32, BlockRecord], + header_blocks: dict[bytes32, HeaderBlock], + blocks: dict[bytes32, BlockRecord], first_in_sub_epoch: bool, - ) -> Tuple[Optional[List[SubSlotData]], Optional[VDFInfo]]: + ) -> tuple[Optional[list[SubSlotData]], Optional[VDFInfo]]: # combine cc vdfs of all reward blocks from the start of the sub slot to end header_block_sub_rec = blocks[header_block.header_hash] # find slot start @@ -418,8 +418,8 @@ async def __first_sub_slot_vdfs( curr_sub_rec = blocks[curr_sub_rec.prev_hash] curr = header_blocks[curr_sub_rec.header_hash] - sub_slots_data: List[SubSlotData] = [] - tmp_sub_slots_data: List[SubSlotData] = [] + sub_slots_data: list[SubSlotData] = [] + tmp_sub_slots_data: list[SubSlotData] = [] while curr.height < header_block.height: if curr is None: log.error("failed fetching block") @@ -469,8 +469,8 @@ async def __first_sub_slot_vdfs( def first_rc_end_of_slot_vdf( self, header_block: HeaderBlock, - blocks: Dict[bytes32, BlockRecord], - header_blocks: Dict[bytes32, HeaderBlock], + blocks: dict[bytes32, BlockRecord], + header_blocks: dict[bytes32, HeaderBlock], ) -> Optional[VDFInfo]: curr = blocks[header_block.header_hash] while curr.height > 0 and not curr.sub_epoch_summary_included: @@ -478,16 +478,16 @@ def first_rc_end_of_slot_vdf( return header_blocks[curr.header_hash].finished_sub_slots[-1].reward_chain.end_of_slot_vdf async def __slot_end_vdf( - self, start_height: uint32, header_blocks: Dict[bytes32, HeaderBlock], blocks: Dict[bytes32, BlockRecord] - ) -> Tuple[Optional[List[SubSlotData]], uint32]: + self, start_height: uint32, header_blocks: dict[bytes32, HeaderBlock], blocks: dict[bytes32, BlockRecord] + ) -> tuple[Optional[list[SubSlotData]], uint32]: # gets all vdfs first sub slot after challenge block to last sub slot log.debug(f"slot end vdf start height {start_height}") header_hash = self.blockchain.height_to_hash(start_height) assert header_hash is not None curr = header_blocks[header_hash] curr_header_hash = curr.header_hash - sub_slots_data: List[SubSlotData] = [] - tmp_sub_slots_data: List[SubSlotData] = [] + sub_slots_data: list[SubSlotData] = [] + tmp_sub_slots_data: list[SubSlotData] = [] while not blocks[curr_header_hash].is_challenge_block(self.constants): if curr.first_in_sub_slot: sub_slots_data.extend(tmp_sub_slots_data) @@ -512,7 +512,7 @@ async 
def __slot_end_vdf( log.debug(f"slot end vdf end height {curr.height} slots {len(sub_slots_data)} ") return sub_slots_data, curr.height - def handle_block_vdfs(self, curr: HeaderBlock, blocks: Dict[bytes32, BlockRecord]) -> SubSlotData: + def handle_block_vdfs(self, curr: HeaderBlock, blocks: dict[bytes32, BlockRecord]) -> SubSlotData: cc_sp_proof = None icc_ip_proof = None cc_sp_info = None @@ -558,7 +558,7 @@ def handle_block_vdfs(self, curr: HeaderBlock, blocks: Dict[bytes32, BlockRecord curr.total_iters, ) - def validate_weight_proof_single_proc(self, weight_proof: WeightProof) -> Tuple[bool, uint32]: + def validate_weight_proof_single_proc(self, weight_proof: WeightProof) -> tuple[bool, uint32]: assert self.blockchain is not None assert len(weight_proof.sub_epochs) > 0 if len(weight_proof.sub_epochs) == 0: @@ -588,7 +588,7 @@ def validate_weight_proof_single_proc(self, weight_proof: WeightProof) -> Tuple[ fork_point, _ = self.get_fork_point(summaries) return True, fork_point - async def validate_weight_proof(self, weight_proof: WeightProof) -> Tuple[bool, uint32, List[SubEpochSummary]]: + async def validate_weight_proof(self, weight_proof: WeightProof) -> tuple[bool, uint32, list[SubEpochSummary]]: assert self.blockchain is not None if len(weight_proof.sub_epochs) == 0: return False, uint32(0), [] @@ -629,7 +629,7 @@ async def validate_weight_proof(self, weight_proof: WeightProof) -> Tuple[bool, valid, _ = await task return valid, fork_point, summaries - def get_fork_point(self, received_summaries: List[SubEpochSummary]) -> Tuple[uint32, int]: + def get_fork_point(self, received_summaries: list[SubEpochSummary]) -> tuple[uint32, int]: # returns the fork height and ses index # iterate through sub epoch summaries to find fork point fork_point_index = 0 @@ -653,8 +653,8 @@ def get_fork_point(self, received_summaries: List[SubEpochSummary]) -> Tuple[uin def _get_weights_for_sampling( - rng: random.Random, total_weight: uint128, recent_chain: List[HeaderBlock] -) -> Optional[List[uint128]]: + rng: random.Random, total_weight: uint128, recent_chain: list[HeaderBlock] +) -> Optional[list[uint128]]: weight_to_check = [] last_l_weight = recent_chain[-1].reward_chain_block.weight - recent_chain[0].reward_chain_block.weight delta = last_l_weight / total_weight @@ -675,7 +675,7 @@ def _get_weights_for_sampling( def _sample_sub_epoch( start_of_epoch_weight: uint128, end_of_epoch_weight: uint128, - weight_to_check: Optional[List[uint128]], + weight_to_check: Optional[list[uint128]], ) -> bool: """ weight_to_check: List[uint128] is expected to be sorted @@ -717,7 +717,7 @@ async def _challenge_block_vdfs( constants: ConsensusConstants, header_block: HeaderBlock, block_rec: BlockRecord, - sub_blocks: Dict[bytes32, BlockRecord], + sub_blocks: dict[bytes32, BlockRecord], ) -> SubSlotData: (_, _, _, _, cc_vdf_iters, _) = get_signage_point_vdf_info( constants, @@ -828,7 +828,7 @@ def handle_end_of_slot( def _validate_sub_epoch_summaries( constants: ConsensusConstants, weight_proof: WeightProof, -) -> Tuple[Optional[List[SubEpochSummary]], Optional[List[uint128]]]: +) -> tuple[Optional[list[SubEpochSummary]], Optional[list[uint128]]]: last_ses_hash, last_ses_sub_height = _get_last_ses_hash(constants, weight_proof.recent_chain_data) if last_ses_hash is None: log.warning("could not find last ses block") @@ -861,12 +861,12 @@ def _validate_sub_epoch_summaries( def _map_sub_epoch_summaries( sub_blocks_for_se: uint32, ses_hash: bytes32, - sub_epoch_data: List[SubEpochData], + sub_epoch_data: 
list[SubEpochData], curr_difficulty: uint64, -) -> Tuple[List[SubEpochSummary], uint128, List[uint128]]: +) -> tuple[list[SubEpochSummary], uint128, list[uint128]]: total_weight: uint128 = uint128(0) - summaries: List[SubEpochSummary] = [] - sub_epoch_weight_list: List[uint128] = [] + summaries: list[SubEpochSummary] = [] + sub_epoch_weight_list: list[uint128] = [] for idx, data in enumerate(sub_epoch_data): ses = SubEpochSummary( ses_hash, @@ -902,7 +902,7 @@ def _map_sub_epoch_summaries( def _validate_summaries_weight( constants: ConsensusConstants, sub_epoch_data_weight: uint128, - summaries: List[SubEpochSummary], + summaries: list[SubEpochSummary], weight_proof: WeightProof, ) -> bool: num_over = summaries[-1].num_blocks_overflow @@ -921,10 +921,10 @@ def _validate_sub_epoch_segments( constants: ConsensusConstants, rng: random.Random, weight_proof_bytes: bytes, - summaries_bytes: List[bytes], + summaries_bytes: list[bytes], height: uint32, validate_from: int = 0, -) -> Optional[List[Tuple[VDFProof, ClassgroupElement, VDFInfo]]]: +) -> Optional[list[tuple[VDFProof, ClassgroupElement, VDFInfo]]]: summaries = summaries_from_bytes(summaries_bytes) sub_epoch_segments: SubEpochSegments = SubEpochSegments.from_bytes(weight_proof_bytes) rc_sub_slot_hash = constants.GENESIS_CHALLENGE @@ -987,7 +987,7 @@ def _validate_segment( first_segment_in_se: bool, sampled: bool, height: uint32, -) -> Tuple[bool, int, int, int, List[Tuple[VDFProof, ClassgroupElement, VDFInfo]]]: +) -> tuple[bool, int, int, int, list[tuple[VDFProof, ClassgroupElement, VDFInfo]]]: ip_iters, slot_iters, slots = 0, 0, 0 after_challenge = False to_validate = [] @@ -1019,9 +1019,9 @@ def _validate_segment( def _get_challenge_block_vdfs( constants: ConsensusConstants, sub_slot_idx: int, - sub_slots: List[SubSlotData], + sub_slots: list[SubSlotData], ssi: uint64, -) -> List[Tuple[VDFProof, ClassgroupElement, VDFInfo]]: +) -> list[tuple[VDFProof, ClassgroupElement, VDFInfo]]: to_validate = [] sub_slot_data = sub_slots[sub_slot_idx] if sub_slot_data.cc_signage_point is not None and sub_slot_data.cc_sp_vdf_info: @@ -1058,9 +1058,9 @@ def _get_challenge_block_vdfs( def _validate_sub_slot_data( constants: ConsensusConstants, sub_slot_idx: int, - sub_slots: List[SubSlotData], + sub_slots: list[SubSlotData], ssi: uint64, -) -> Tuple[bool, List[Tuple[VDFProof, ClassgroupElement, VDFInfo]]]: +) -> tuple[bool, list[tuple[VDFProof, ClassgroupElement, VDFInfo]]]: sub_slot_data = sub_slots[sub_slot_idx] assert sub_slot_idx > 0 prev_ssd = sub_slots[sub_slot_idx - 1] @@ -1134,7 +1134,7 @@ def sub_slot_data_vdf_input( constants: ConsensusConstants, sub_slot_data: SubSlotData, sub_slot_idx: int, - sub_slots: List[SubSlotData], + sub_slots: list[SubSlotData], is_overflow: bool, new_sub_slot: bool, ssi: uint64, @@ -1198,9 +1198,9 @@ def sub_slot_data_vdf_input( def validate_recent_blocks( constants: ConsensusConstants, recent_chain_bytes: bytes, - summaries_bytes: List[bytes], + summaries_bytes: list[bytes], shutdown_file_path: Optional[pathlib.Path] = None, -) -> Tuple[bool, List[bytes]]: +) -> tuple[bool, list[bytes]]: recent_chain: RecentChainData = RecentChainData.from_bytes(recent_chain_bytes) summaries = summaries_from_bytes(summaries_bytes) sub_blocks = BlockCache({}) @@ -1384,7 +1384,7 @@ def __validate_pospace( def __get_rc_sub_slot( constants: ConsensusConstants, segment: SubEpochChallengeSegment, - summaries: List[SubEpochSummary], + summaries: list[SubEpochSummary], curr_ssi: uint64, ) -> RewardChainSubSlot: ses = 
summaries[uint32(segment.sub_epoch_n - 1)] @@ -1465,7 +1465,7 @@ def __get_rc_sub_slot( return rc_sub_slot -def __get_cc_sub_slot(sub_slots: List[SubSlotData], idx: int, ses: Optional[SubEpochSummary]) -> ChallengeChainSubSlot: +def __get_cc_sub_slot(sub_slots: list[SubSlotData], idx: int, ses: Optional[SubEpochSummary]) -> ChallengeChainSubSlot: sub_slot: Optional[SubSlotData] = None for i in reversed(range(0, idx)): sub_slot = sub_slots[i] @@ -1491,8 +1491,8 @@ def __get_cc_sub_slot(sub_slots: List[SubSlotData], idx: int, ses: Optional[SubE def _get_curr_diff_ssi( - constants: ConsensusConstants, idx: int, summaries: List[SubEpochSummary] -) -> Tuple[uint64, uint64]: + constants: ConsensusConstants, idx: int, summaries: list[SubEpochSummary] +) -> tuple[uint64, uint64]: curr_difficulty = constants.DIFFICULTY_STARTING curr_ssi = constants.SUB_SLOT_ITERS_STARTING for ses in reversed(summaries[0:idx]): @@ -1505,7 +1505,7 @@ def _get_curr_diff_ssi( return curr_difficulty, curr_ssi -def vars_to_bytes(summaries: List[SubEpochSummary], weight_proof: WeightProof) -> Tuple[List[bytes], bytes, bytes]: +def vars_to_bytes(summaries: list[SubEpochSummary], weight_proof: WeightProof) -> tuple[list[bytes], bytes, bytes]: wp_recent_chain_bytes = bytes(RecentChainData(weight_proof.recent_chain_data)) wp_segment_bytes = bytes(SubEpochSegments(weight_proof.sub_epoch_segments)) summary_bytes = [] @@ -1514,7 +1514,7 @@ def vars_to_bytes(summaries: List[SubEpochSummary], weight_proof: WeightProof) - return summary_bytes, wp_segment_bytes, wp_recent_chain_bytes -def summaries_from_bytes(summaries_bytes: List[bytes]) -> List[SubEpochSummary]: +def summaries_from_bytes(summaries_bytes: list[bytes]) -> list[SubEpochSummary]: summaries = [] for summary in summaries_bytes: summaries.append(SubEpochSummary.from_bytes(summary)) @@ -1522,8 +1522,8 @@ def summaries_from_bytes(summaries_bytes: List[bytes]) -> List[SubEpochSummary]: def _get_last_ses_hash( - constants: ConsensusConstants, recent_reward_chain: List[HeaderBlock] -) -> Tuple[Optional[bytes32], uint32]: + constants: ConsensusConstants, recent_reward_chain: list[HeaderBlock] +) -> tuple[Optional[bytes32], uint32]: for idx, block in enumerate(reversed(recent_reward_chain)): if (block.reward_chain_block.height % constants.SUB_EPOCH_BLOCKS) == 0: idx = len(recent_reward_chain) - 1 - idx # reverse @@ -1541,8 +1541,8 @@ def _get_last_ses_hash( return None, uint32(0) -def _get_ses_idx(recent_reward_chain: List[HeaderBlock]) -> List[int]: - idxs: List[int] = [] +def _get_ses_idx(recent_reward_chain: list[HeaderBlock]) -> list[int]: + idxs: list[int] = [] for idx, curr in enumerate(recent_reward_chain): if len(curr.finished_sub_slots) > 0: for slot in curr.finished_sub_slots: @@ -1591,11 +1591,11 @@ def blue_boxed_end_of_slot(sub_slot: EndOfSubSlotBundle) -> bool: def validate_sub_epoch_sampling( - rng: random.Random, sub_epoch_weight_list: List[uint128], weight_proof: WeightProof + rng: random.Random, sub_epoch_weight_list: list[uint128], weight_proof: WeightProof ) -> bool: tip = weight_proof.recent_chain_data[-1] weight_to_check = _get_weights_for_sampling(rng, tip.weight, weight_proof.recent_chain_data) - sampled_sub_epochs: Dict[int, bool] = {} + sampled_sub_epochs: dict[int, bool] = {} for idx in range(1, len(sub_epoch_weight_list)): if _sample_sub_epoch(sub_epoch_weight_list[idx - 1], sub_epoch_weight_list[idx], weight_to_check): sampled_sub_epochs[idx - 1] = True @@ -1613,9 +1613,9 @@ def validate_sub_epoch_sampling( def map_segments_by_sub_epoch( - 
sub_epoch_segments: List[SubEpochChallengeSegment], -) -> Dict[int, List[SubEpochChallengeSegment]]: - segments: Dict[int, List[SubEpochChallengeSegment]] = {} + sub_epoch_segments: list[SubEpochChallengeSegment], +) -> dict[int, list[SubEpochChallengeSegment]]: + segments: dict[int, list[SubEpochChallengeSegment]] = {} curr_sub_epoch_n = -1 for idx, segment in enumerate(sub_epoch_segments): if curr_sub_epoch_n < segment.sub_epoch_n: @@ -1627,7 +1627,7 @@ def map_segments_by_sub_epoch( def _validate_vdf_batch( constants: ConsensusConstants, - vdf_list: List[Tuple[bytes, bytes, bytes]], + vdf_list: list[tuple[bytes, bytes, bytes]], shutdown_file_path: Optional[pathlib.Path] = None, ) -> bool: for vdf_proof_bytes, class_group_bytes, info in vdf_list: @@ -1650,11 +1650,11 @@ async def validate_weight_proof_inner( shutdown_file_name: str, num_processes: int, weight_proof: WeightProof, - summaries: List[SubEpochSummary], - sub_epoch_weight_list: List[uint128], + summaries: list[SubEpochSummary], + sub_epoch_weight_list: list[uint128], skip_segment_validation: bool, validate_from: int, -) -> Tuple[bool, List[BlockRecord]]: +) -> tuple[bool, list[BlockRecord]]: assert len(weight_proof.sub_epochs) > 0 if len(weight_proof.sub_epochs) == 0: return False, [] diff --git a/chia/harvester/harvester.py b/chia/harvester/harvester.py index 345ffbea0c86..96952cfc9a85 100644 --- a/chia/harvester/harvester.py +++ b/chia/harvester/harvester.py @@ -5,9 +5,10 @@ import contextlib import dataclasses import logging +from collections.abc import AsyncIterator from concurrent.futures.thread import ThreadPoolExecutor from pathlib import Path -from typing import TYPE_CHECKING, Any, AsyncIterator, ClassVar, Dict, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from typing_extensions import Literal @@ -71,7 +72,7 @@ def server(self) -> ChiaServer: return self._server - def __init__(self, root_path: Path, config: Dict[str, Any], constants: ConsensusConstants): + def __init__(self, root_path: Path, config: dict[str, Any], constants: ConsensusConstants): self.log = log self.root_path = root_path # TODO, remove checks below later after some versions / time @@ -145,7 +146,7 @@ async def manage(self) -> AsyncIterator[None]: await self.plot_sync_sender.await_closed() - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) async def on_connect(self, connection: WSChiaConnection) -> None: @@ -154,7 +155,7 @@ async def on_connect(self, connection: WSChiaConnection) -> None: def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None: self.state_changed_callback = callback - def state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None: + def state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None: if self.state_changed_callback is not None: self.state_changed_callback(change, change_data) @@ -181,9 +182,9 @@ async def on_disconnect(self, connection: WSChiaConnection) -> None: asyncio.run_coroutine_threadsafe(self.plot_sync_sender.await_closed(), asyncio.get_running_loop()) self.plot_manager.stop_refreshing() - def get_plots(self) -> Tuple[List[Dict[str, Any]], List[str], List[str]]: + def get_plots(self) -> tuple[list[dict[str, Any]], list[str], list[str]]: self.log.debug(f"get_plots prover items: 
{self.plot_manager.plot_count()}") - response_plots: List[Dict[str, Any]] = [] + response_plots: list[dict[str, Any]] = [] with self.plot_manager: for path, plot_info in self.plot_manager.plots.items(): prover = plot_info.prover @@ -222,7 +223,7 @@ async def add_plot_directory(self, str_path: str) -> bool: self.plot_manager.trigger_refresh() return True - async def get_plot_directories(self) -> List[str]: + async def get_plot_directories(self) -> list[str]: return get_plot_directories(self.root_path) async def remove_plot_directory(self, str_path: str) -> bool: @@ -230,7 +231,7 @@ async def remove_plot_directory(self, str_path: str) -> bool: self.plot_manager.trigger_refresh() return True - async def get_harvester_config(self) -> Dict[str, Any]: + async def get_harvester_config(self) -> dict[str, Any]: return get_harvester_config(self.root_path) async def update_harvester_config( diff --git a/chia/harvester/harvester_api.py b/chia/harvester/harvester_api.py index b8bc2f6151d6..2631aac8281f 100644 --- a/chia/harvester/harvester_api.py +++ b/chia/harvester/harvester_api.py @@ -4,7 +4,7 @@ import logging import time from pathlib import Path -from typing import List, Optional, Tuple +from typing import Optional from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -87,7 +87,7 @@ async def new_signage_point_harvester( loop = asyncio.get_running_loop() - def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ProofOfSpace]]: + def blocking_lookup(filename: Path, plot_info: PlotInfo) -> list[tuple[bytes32, ProofOfSpace]]: # Uses the DiskProver object to lookup qualities. This is a blocking call, # so it should be run in a thread pool. try: @@ -123,7 +123,7 @@ def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, ) return [] - responses: List[Tuple[bytes32, ProofOfSpace]] = [] + responses: list[tuple[bytes32, ProofOfSpace]] = [] if quality_strings is not None: difficulty = new_challenge.difficulty sub_slot_iters = new_challenge.sub_slot_iters @@ -205,12 +205,12 @@ def blocking_lookup(filename: Path, plot_info: PlotInfo) -> List[Tuple[bytes32, async def lookup_challenge( filename: Path, plot_info: PlotInfo - ) -> Tuple[Path, List[harvester_protocol.NewProofOfSpace]]: + ) -> tuple[Path, list[harvester_protocol.NewProofOfSpace]]: # Executes a DiskProverLookup in a thread pool, and returns responses - all_responses: List[harvester_protocol.NewProofOfSpace] = [] + all_responses: list[harvester_protocol.NewProofOfSpace] = [] if self.harvester._shut_down: return filename, [] - proofs_of_space_and_q: List[Tuple[bytes32, ProofOfSpace]] = await loop.run_in_executor( + proofs_of_space_and_q: list[tuple[bytes32, ProofOfSpace]] = await loop.run_in_executor( self.harvester.executor, blocking_lookup, filename, plot_info ) for quality_str, proof_of_space in proofs_of_space_and_q: @@ -329,7 +329,7 @@ async def request_signatures(self, request: harvester_protocol.RequestSignatures # This is only a partial signature. When combined with the farmer's half, it will # form a complete PrependSignature. 
- message_signatures: List[Tuple[bytes32, G2Element]] = [] + message_signatures: list[tuple[bytes32, G2Element]] = [] for message in request.messages: signature: G2Element = AugSchemeMPL.sign(local_sk, message, agg_pk) message_signatures.append((message, signature)) diff --git a/chia/introducer/introducer.py b/chia/introducer/introducer.py index f97ce6b21216..d2142ef5ca26 100644 --- a/chia/introducer/introducer.py +++ b/chia/introducer/introducer.py @@ -4,7 +4,8 @@ import contextlib import logging import time -from typing import TYPE_CHECKING, Any, AsyncIterator, ClassVar, Dict, List, Optional, cast +from collections.abc import AsyncIterator +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia.rpc.rpc_server import StateChangedProtocol, default_get_connections from chia.server.introducer_peers import VettedPeer @@ -53,7 +54,7 @@ def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None: # TODO: fill this out? pass - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) def set_server(self, server: ChiaServer): diff --git a/chia/legacy/keyring.py b/chia/legacy/keyring.py index 8630dc61805e..2f6e7c2a32bd 100644 --- a/chia/legacy/keyring.py +++ b/chia/legacy/keyring.py @@ -6,7 +6,7 @@ from __future__ import annotations import sys -from typing import Callable, List, Union, cast +from typing import Callable, Union, cast import click from chia_rs import G1Element @@ -85,8 +85,8 @@ def get_key_data(keyring: LegacyKeyring, index: int) -> KeyData: ) -def get_keys(keyring: LegacyKeyring) -> List[KeyData]: - keys: List[KeyData] = [] +def get_keys(keyring: LegacyKeyring) -> list[KeyData]: + keys: list[KeyData] = [] for index in range(MAX_KEYS): try: keys.append(get_key_data(keyring, index)) diff --git a/chia/plot_sync/delta.py b/chia/plot_sync/delta.py index ff72407447fc..bf1c925bd293 100644 --- a/chia/plot_sync/delta.py +++ b/chia/plot_sync/delta.py @@ -1,15 +1,15 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Dict, List, Union +from typing import Union from chia.protocols.harvester_protocol import Plot @dataclass class DeltaType: - additions: Union[Dict[str, Plot], List[str]] - removals: List[str] + additions: Union[dict[str, Plot], list[str]] + removals: list[str] def __str__(self) -> str: return f"+{len(self.additions)}/-{len(self.removals)}" @@ -24,17 +24,17 @@ def empty(self) -> bool: @dataclass class PlotListDelta(DeltaType): - additions: Dict[str, Plot] = field(default_factory=dict) - removals: List[str] = field(default_factory=list) + additions: dict[str, Plot] = field(default_factory=dict) + removals: list[str] = field(default_factory=list) @dataclass class PathListDelta(DeltaType): - additions: List[str] = field(default_factory=list) - removals: List[str] = field(default_factory=list) + additions: list[str] = field(default_factory=list) + removals: list[str] = field(default_factory=list) @staticmethod - def from_lists(old: List[str], new: List[str]) -> PathListDelta: + def from_lists(old: list[str], new: list[str]) -> PathListDelta: return PathListDelta([x for x in new if x not in old], [x for x in old if x not in new]) diff --git a/chia/plot_sync/receiver.py b/chia/plot_sync/receiver.py index 992a8f2af239..0379a446426a 100644 --- a/chia/plot_sync/receiver.py +++ 
b/chia/plot_sync/receiver.py @@ -2,8 +2,9 @@ import logging import time +from collections.abc import Awaitable, Collection, Sequence from dataclasses import dataclass, field -from typing import Any, Awaitable, Callable, Collection, Dict, List, Optional, Sequence, Union +from typing import Any, Callable, Optional, Union from typing_extensions import Protocol @@ -82,10 +83,10 @@ class Receiver: _connection: WSChiaConnection _current_sync: Sync _last_sync: Sync - _plots: Dict[str, Plot] - _invalid: List[str] - _keys_missing: List[str] - _duplicates: List[str] + _plots: dict[str, Plot] + _invalid: list[str] + _keys_missing: list[str] + _duplicates: list[str] _total_plot_size: int _total_effective_plot_size: int _update_callback: ReceiverUpdateCallback @@ -138,16 +139,16 @@ def last_sync(self) -> Sync: def initial_sync(self) -> bool: return self._last_sync.sync_id == 0 - def plots(self) -> Dict[str, Plot]: + def plots(self) -> dict[str, Plot]: return self._plots - def invalid(self) -> List[str]: + def invalid(self) -> list[str]: return self._invalid - def keys_missing(self) -> List[str]: + def keys_missing(self) -> list[str]: return self._keys_missing - def duplicates(self) -> List[str]: + def duplicates(self) -> list[str]: return self._duplicates def total_plot_size(self) -> int: @@ -245,7 +246,7 @@ async def process_path_list( state: State, next_state: State, target: Collection[str], - delta: List[str], + delta: list[str], paths: PlotSyncPathList, is_removal: bool = False, ) -> None: @@ -359,7 +360,7 @@ async def _sync_done(self, data: PlotSyncDone) -> None: async def sync_done(self, data: PlotSyncDone) -> None: await self._process(self._sync_done, ProtocolMessageTypes.plot_sync_done, data) - def to_dict(self, counts_only: bool = False) -> Dict[str, Any]: + def to_dict(self, counts_only: bool = False) -> dict[str, Any]: syncing = None if self._current_sync.in_progress(): syncing = { diff --git a/chia/plot_sync/sender.py b/chia/plot_sync/sender.py index ffee30e5ae65..e2a2fbccf99f 100644 --- a/chia/plot_sync/sender.py +++ b/chia/plot_sync/sender.py @@ -4,9 +4,10 @@ import logging import time import traceback +from collections.abc import Iterable from dataclasses import dataclass from pathlib import Path -from typing import Any, Generic, Iterable, List, Optional, Tuple, Type, TypeVar +from typing import Any, Generic, Optional, TypeVar from typing_extensions import Protocol @@ -32,8 +33,8 @@ log = logging.getLogger(__name__) -def _convert_plot_info_list(plot_infos: List[PlotInfo]) -> List[Plot]: - converted: List[Plot] = [] +def _convert_plot_info_list(plot_infos: list[PlotInfo]) -> list[Plot]: + converted: list[Plot] = [] for plot_info in plot_infos: converted.append( Plot( @@ -66,10 +67,10 @@ class MessageGenerator(Generic[T]): sync_id: uint64 message_type: ProtocolMessageTypes message_id: uint64 - payload_type: Type[T] + payload_type: type[T] args: Iterable[object] - def generate(self) -> Tuple[PlotSyncIdentifier, T]: + def generate(self) -> tuple[PlotSyncIdentifier, T]: identifier = PlotSyncIdentifier(uint64(int(time.time())), self.sync_id, self.message_id) payload = self.payload_type(identifier, *self.args) return identifier, payload @@ -93,7 +94,7 @@ class Sender: _connection: Optional[WSChiaConnection] _sync_id: uint64 _next_message_id: uint64 - _messages: List[MessageGenerator[PayloadType]] + _messages: list[MessageGenerator[PayloadType]] _last_sync_id: uint64 _stop_requested = False _task: Optional[asyncio.Task[None]] @@ -249,7 +250,7 @@ def failed(message: str) -> bool: return True - 
def _add_list_batched(self, message_type: ProtocolMessageTypes, payload_type: Any, data: List[Any]) -> None: + def _add_list_batched(self, message_type: ProtocolMessageTypes, payload_type: Any, data: list[Any]) -> None: if len(data) == 0: self._add_message(message_type, payload_type, [], True) return @@ -278,13 +279,13 @@ def sync_start(self, count: float, initial: bool) -> None: self._harvesting_mode, ) - def process_batch(self, loaded: List[PlotInfo], remaining: int) -> None: + def process_batch(self, loaded: list[PlotInfo], remaining: int) -> None: log.debug(f"process_batch {self}: loaded {len(loaded)}, remaining {remaining}") if len(loaded) > 0 or remaining == 0: converted = _convert_plot_info_list(loaded) self._add_message(ProtocolMessageTypes.plot_sync_loaded, PlotSyncPlotList, converted, remaining == 0) - def sync_done(self, removed: List[Path], duration: float) -> None: + def sync_done(self, removed: list[Path], duration: float) -> None: log.debug(f"sync_done {self}: removed {len(removed)}, duration {duration}") removed_list = [str(x) for x in removed] self._add_list_batched( diff --git a/chia/plotters/bladebit.py b/chia/plotters/bladebit.py index 882146853137..dfce9a3b9b32 100644 --- a/chia/plotters/bladebit.py +++ b/chia/plotters/bladebit.py @@ -7,7 +7,7 @@ import sys import traceback from pathlib import Path -from typing import Any, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, Literal, Optional, Union from chia.plotters.plotters_util import get_venv_bin, reset_loop_policy_for_windows, run_command, run_plotter from chia.plotting.create_plots import resolve_plot_keys @@ -23,7 +23,7 @@ def is_bladebit_supported() -> bool: return sys.platform.startswith("linux") or sys.platform in ["win32", "cygwin", "darwin"] -def meets_memory_requirement(plotters_root_path: Path) -> Tuple[bool, Optional[str]]: +def meets_memory_requirement(plotters_root_path: Path) -> tuple[bool, Optional[str]]: have_enough_memory: bool = False warning_string: Optional[str] = None @@ -40,7 +40,7 @@ def meets_memory_requirement(plotters_root_path: Path) -> Tuple[bool, Optional[s if proc.returncode != 0: return have_enough_memory, proc.stderr.strip() - memory_info: Dict[str, int] = json.loads(proc.stdout) + memory_info: dict[str, int] = json.loads(proc.stdout) total_bytes: int = memory_info.get("total", -1) required_bytes: int = memory_info.get("required", 0) have_enough_memory = total_bytes >= required_bytes @@ -131,7 +131,7 @@ def get_bladebit_executable_path(plotters_root_path: Path) -> Path: def get_bladebit_version( plotters_root_path: Path, -) -> Union[Tuple[Literal[False], str], Tuple[None, str], Tuple[Literal[True], List[str]]]: +) -> Union[tuple[Literal[False], str], tuple[None, str], tuple[Literal[True], list[str]]]: bladebit_executable_path = get_bladebit_executable_path(plotters_root_path) if not bladebit_executable_path.exists(): # (found=False, "") @@ -157,8 +157,8 @@ def get_bladebit_version( return None, str(e) -def get_bladebit_install_info(plotters_root_path: Path) -> Optional[Dict[str, Any]]: - info: Dict[str, Any] = {"display_name": "BladeBit Plotter"} +def get_bladebit_install_info(plotters_root_path: Path) -> Optional[dict[str, Any]]: + info: dict[str, Any] = {"display_name": "BladeBit Plotter"} installed: bool = False supported: bool = is_bladebit_supported() cuda_available: bool = is_cudaplot_available(plotters_root_path) diff --git a/chia/plotters/chiapos.py b/chia/plotters/chiapos.py index bed2d23643f9..0f8eb29561ed 100644 --- a/chia/plotters/chiapos.py +++ 
b/chia/plotters/chiapos.py @@ -10,7 +10,7 @@ import logging from argparse import Namespace from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.plotting.create_plots import create_plots, resolve_plot_keys from chia.plotting.util import Params, add_plot_directory, validate_plot_size @@ -18,7 +18,7 @@ log = logging.getLogger(__name__) -def get_chiapos_install_info() -> Optional[Dict[str, Any]]: +def get_chiapos_install_info() -> Optional[dict[str, Any]]: chiapos_version = importlib.metadata.version("chiapos") return {"display_name": "Chia Proof of Space", "version": chiapos_version, "installed": True} diff --git a/chia/plotters/madmax.py b/chia/plotters/madmax.py index e98a7962242b..fb5e49c068b8 100644 --- a/chia/plotters/madmax.py +++ b/chia/plotters/madmax.py @@ -6,7 +6,7 @@ import sys import traceback from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.plotters.plotters_util import get_venv_bin, reset_loop_policy_for_windows, run_command, run_plotter from chia.plotting.create_plots import resolve_plot_keys @@ -100,8 +100,8 @@ def get_madmax_version(plotters_root_path: Path): return None, f"Failed to determine madmax version: {e} {tb}" -def get_madmax_install_info(plotters_root_path: Path) -> Optional[Dict[str, Any]]: - info: Dict[str, Any] = {"display_name": "madMAx Plotter"} +def get_madmax_install_info(plotters_root_path: Path) -> Optional[dict[str, Any]]: + info: dict[str, Any] = {"display_name": "madMAx Plotter"} installed: bool = False supported: bool = is_madmax_supported() diff --git a/chia/plotters/plotters.py b/chia/plotters/plotters.py index 2d3337b37a5f..0cdf93e472a6 100644 --- a/chia/plotters/plotters.py +++ b/chia/plotters/plotters.py @@ -5,7 +5,7 @@ import os from enum import Enum from pathlib import Path -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.plotters.bladebit import get_bladebit_install_info, plot_bladebit from chia.plotters.chiapos import get_chiapos_install_info, plot_chia @@ -547,12 +547,12 @@ def call_plotters(root_path: Path, args): show_plotters_version(chia_root_path) -def get_available_plotters(root_path) -> Dict[str, Any]: +def get_available_plotters(root_path) -> dict[str, Any]: plotters_root_path: Path = get_plotters_root_path(root_path) - plotters: Dict[str, Any] = {} - chiapos: Optional[Dict[str, Any]] = get_chiapos_install_info() - bladebit: Optional[Dict[str, Any]] = get_bladebit_install_info(plotters_root_path) - madmax: Optional[Dict[str, Any]] = get_madmax_install_info(plotters_root_path) + plotters: dict[str, Any] = {} + chiapos: Optional[dict[str, Any]] = get_chiapos_install_info() + bladebit: Optional[dict[str, Any]] = get_bladebit_install_info(plotters_root_path) + madmax: Optional[dict[str, Any]] = get_madmax_install_info(plotters_root_path) if chiapos is not None: plotters["chiapos"] = chiapos diff --git a/chia/plotters/plotters_util.py b/chia/plotters/plotters_util.py index 875794f65855..f58828ce468a 100644 --- a/chia/plotters/plotters_util.py +++ b/chia/plotters/plotters_util.py @@ -7,9 +7,10 @@ import signal import subprocess import sys +from collections.abc import Iterator from datetime import datetime from pathlib import Path -from typing import Iterator, Optional, TextIO +from typing import Optional, TextIO from chia.util.chia_version import chia_short_version from chia.util.config import lock_and_load_config diff --git a/chia/plotting/cache.py b/chia/plotting/cache.py index 
ed8e502e520e..a615e971fc81 100644 --- a/chia/plotting/cache.py +++ b/chia/plotting/cache.py @@ -3,10 +3,11 @@ import logging import time import traceback +from collections.abc import ItemsView, KeysView, ValuesView from dataclasses import dataclass, field from math import ceil from pathlib import Path -from typing import Dict, ItemsView, KeysView, List, Optional, Tuple, ValuesView +from typing import Optional from chia_rs import G1Element from chiapos import DiskProver @@ -37,7 +38,7 @@ class DiskCacheEntry(Streamable): @streamable @dataclass(frozen=True) class CacheDataV1(Streamable): - entries: List[Tuple[str, DiskCacheEntry]] + entries: list[tuple[str, DiskCacheEntry]] @dataclass @@ -84,7 +85,7 @@ def expired(self, expiry_seconds: int) -> bool: class Cache: _path: Path _changed: bool = False - _data: Dict[Path, CacheEntry] = field(default_factory=dict) + _data: dict[Path, CacheEntry] = field(default_factory=dict) expiry_seconds: int = 7 * 24 * 60 * 60 # Keep the cache entries alive for 7 days after its last access def __post_init__(self) -> None: @@ -97,7 +98,7 @@ def update(self, path: Path, entry: CacheEntry) -> None: self._data[path] = entry self._changed = True - def remove(self, cache_keys: List[Path]) -> None: + def remove(self, cache_keys: list[Path]) -> None: for key in cache_keys: if key in self._data: del self._data[key] @@ -105,7 +106,7 @@ def remove(self, cache_keys: List[Path]) -> None: def save(self) -> None: try: - disk_cache_entries: Dict[str, DiskCacheEntry] = { + disk_cache_entries: dict[str, DiskCacheEntry] = { str(path): DiskCacheEntry( bytes(cache_entry.prover), cache_entry.farmer_public_key, @@ -136,8 +137,8 @@ def load(self) -> None: start = time.time() cache_data: CacheDataV1 = CacheDataV1.from_bytes(stored_cache.blob) self._data = {} - estimated_c2_sizes: Dict[int, int] = {} - measured_sizes: Dict[int, int] = { + estimated_c2_sizes: dict[int, int] = {} + measured_sizes: dict[int, int] = { 32: 738, 33: 1083, 34: 1771, diff --git a/chia/plotting/check_plots.py b/chia/plotting/check_plots.py index b374e1c7ec79..0b4cae105a87 100644 --- a/chia/plotting/check_plots.py +++ b/chia/plotting/check_plots.py @@ -6,7 +6,7 @@ from pathlib import Path from threading import Lock from time import sleep, time -from typing import List, Optional +from typing import Optional from chia_rs import G1Element from chiapos import Verifier @@ -103,7 +103,7 @@ def check_plots( log.info("Plot filenames expected to end with -[64 char plot ID].plot") if list_duplicates: - all_filenames: List[Path] = [] + all_filenames: list[Path] = [] for paths in get_plot_filenames(root_path).values(): all_filenames += paths find_duplicate_plot_IDs(all_filenames) @@ -135,7 +135,7 @@ def check_plots( log.info(f"Starting to test each plot with {num} challenges each\n") total_good_plots: Counter[str] = Counter() total_size = 0 - bad_plots_list: List[Path] = [] + bad_plots_list: list[Path] = [] with plot_manager: diff --git a/chia/plotting/create_plots.py b/chia/plotting/create_plots.py index c6321d792cb5..09208740da41 100644 --- a/chia/plotting/create_plots.py +++ b/chia/plotting/create_plots.py @@ -3,7 +3,7 @@ import logging from datetime import datetime from pathlib import Path -from typing import Dict, List, Optional, Tuple +from typing import Optional from chia_rs import AugSchemeMPL, G1Element, PrivateKey from chiapos import DiskPlotter @@ -105,7 +105,7 @@ async def get_sk(self, keychain_proxy: Optional[KeychainProxy] = None) -> Option except Exception as e: log.error(f"Keychain proxy failed with error: 
{e}") else: - sk_ent: Optional[Tuple[PrivateKey, bytes]] = None + sk_ent: Optional[tuple[PrivateKey, bytes]] = None keychain: Keychain = Keychain() if self.alt_fingerprint is not None: sk_ent = keychain.get_private_key_by_fingerprint(self.alt_fingerprint) @@ -151,8 +151,8 @@ async def create_plots( args: Params, keys: PlotKeys, use_datetime: bool = True, - test_private_keys: Optional[List[PrivateKey]] = None, -) -> Tuple[Dict[bytes32, Path], Dict[bytes32, Path]]: + test_private_keys: Optional[list[PrivateKey]] = None, +) -> tuple[dict[bytes32, Path], dict[bytes32, Path]]: if args.tmp2_dir is None: args.tmp2_dir = args.tmp_dir assert (keys.pool_public_key is None) != (keys.pool_contract_puzzle_hash is None) @@ -182,8 +182,8 @@ async def create_plots( args.final_dir.mkdir(parents=True, exist_ok=True) - created_plots: Dict[bytes32, Path] = {} - existing_plots: Dict[bytes32, Path] = {} + created_plots: dict[bytes32, Path] = {} + existing_plots: dict[bytes32, Path] = {} for i in range(num): # Generate a random master secret key if test_private_keys is not None: diff --git a/chia/plotting/manager.py b/chia/plotting/manager.py index 4071bc1ca802..89990cb3a036 100644 --- a/chia/plotting/manager.py +++ b/chia/plotting/manager.py @@ -6,7 +6,7 @@ import traceback from concurrent.futures.thread import ThreadPoolExecutor from pathlib import Path -from typing import Any, Callable, Dict, List, Optional, Set, Tuple +from typing import Any, Callable, Optional from chia_rs import G1Element from chiapos import DiskProver, decompressor_context_queue @@ -27,13 +27,13 @@ class PlotManager: - plots: Dict[Path, PlotInfo] - plot_filename_paths: Dict[str, Tuple[str, Set[str]]] + plots: dict[Path, PlotInfo] + plot_filename_paths: dict[str, tuple[str, set[str]]] plot_filename_paths_lock: threading.Lock - failed_to_open_filenames: Dict[Path, int] - no_key_filenames: Set[Path] - farmer_public_keys: List[G1Element] - pool_public_keys: List[G1Element] + failed_to_open_filenames: dict[Path, int] + no_key_filenames: set[Path] + farmer_public_keys: list[G1Element] + pool_public_keys: list[G1Element] cache: Cache match_str: Optional[str] open_no_key_filenames: bool @@ -137,7 +137,7 @@ def reset(self) -> None: def set_refresh_callback(self, callback: Callable): self._refresh_callback = callback - def set_public_keys(self, farmer_public_keys: List[G1Element], pool_public_keys: List[G1Element]): + def set_public_keys(self, farmer_public_keys: list[G1Element], pool_public_keys: list[G1Element]): self.farmer_public_keys = farmer_public_keys self.pool_public_keys = pool_public_keys @@ -151,7 +151,7 @@ def plot_count(self) -> int: with self: return len(self.plots) - def get_duplicates(self) -> List[Path]: + def get_duplicates(self) -> list[Path]: result = [] for plot_filename, paths_entry in self.plot_filename_paths.items(): _, duplicated_paths = paths_entry @@ -188,9 +188,9 @@ def _refresh_task(self, sleep_interval_ms: int): if not self._refreshing_enabled: return - plot_filenames: Dict[Path, List[Path]] = get_plot_filenames(self.root_path) - plot_directories: Set[Path] = set(plot_filenames.keys()) - plot_paths: Set[Path] = set() + plot_filenames: dict[Path, list[Path]] = get_plot_filenames(self.root_path) + plot_directories: set[Path] = set(plot_filenames.keys()) + plot_paths: set[Path] = set() for paths in plot_filenames.values(): plot_paths.update(paths) @@ -208,7 +208,7 @@ def _refresh_task(self, sleep_interval_ms: int): if path not in plot_paths: self.no_key_filenames.remove(path) - filenames_to_remove: List[str] = [] + 
filenames_to_remove: list[str] = [] for plot_filename, paths_entry in self.plot_filename_paths.items(): loaded_path, duplicated_paths = paths_entry loaded_plot = Path(loaded_path) / Path(plot_filename) @@ -221,7 +221,7 @@ def _refresh_task(self, sleep_interval_ms: int): # No need to check the duplicates here since we drop the whole entry continue - paths_to_remove: List[str] = [] + paths_to_remove: list[str] = [] for path_str in duplicated_paths: loaded_plot = Path(path_str) / Path(plot_filename) if loaded_plot not in plot_paths: @@ -258,7 +258,7 @@ def _refresh_task(self, sleep_interval_ms: int): # Cleanup unused cache self.log.debug(f"_refresh_task: cached entries before cleanup: {len(self.cache)}") - remove_paths: List[Path] = [] + remove_paths: list[Path] = [] for path, cache_entry in self.cache.items(): if cache_entry.expired(Cache.expiry_seconds) and path not in self.plots: remove_paths.append(path) @@ -281,7 +281,7 @@ def _refresh_task(self, sleep_interval_ms: int): log.error(f"_refresh_callback raised: {e} with the traceback: {traceback.format_exc()}") self.reset() - def refresh_batch(self, plot_paths: List[Path], plot_directories: Set[Path]) -> PlotRefreshResult: + def refresh_batch(self, plot_paths: list[Path], plot_directories: set[Path]) -> PlotRefreshResult: start_time: float = time.time() result: PlotRefreshResult = PlotRefreshResult(processed=len(plot_paths)) counter_lock = threading.Lock() @@ -308,7 +308,7 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: if file_path in self.plots: return self.plots[file_path] - entry: Optional[Tuple[str, Set[str]]] = self.plot_filename_paths.get(file_path.name) + entry: Optional[tuple[str, set[str]]] = self.plot_filename_paths.get(file_path.name) if entry is not None: loaded_parent, duplicates = entry if str(file_path.parent) in duplicates: @@ -383,7 +383,7 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: self.no_key_filenames.remove(file_path) with self.plot_filename_paths_lock: - paths: Optional[Tuple[str, Set[str]]] = self.plot_filename_paths.get(file_path.name) + paths: Optional[tuple[str, set[str]]] = self.plot_filename_paths.get(file_path.name) if paths is None: paths = (str(Path(cache_entry.prover.get_filename()).parent), set()) self.plot_filename_paths[file_path.name] = paths @@ -419,7 +419,7 @@ def process_file(file_path: Path) -> Optional[PlotInfo]: return new_plot_info with self, ThreadPoolExecutor() as executor: - plots_refreshed: Dict[Path, PlotInfo] = {} + plots_refreshed: dict[Path, PlotInfo] = {} for new_plot in executor.map(process_file, plot_paths): if new_plot is not None: plots_refreshed[Path(new_plot.prover.get_filename())] = new_plot diff --git a/chia/plotting/util.py b/chia/plotting/util.py index f74b0e4d2733..b0b1642e46d9 100644 --- a/chia/plotting/util.py +++ b/chia/plotting/util.py @@ -4,7 +4,7 @@ from dataclasses import dataclass, field from enum import Enum, IntEnum from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import Any, Optional, Union from chia_rs import G1Element, PrivateKey from chiapos import DiskProver @@ -70,8 +70,8 @@ class PlotRefreshEvents(Enum): @dataclass class PlotRefreshResult: - loaded: List[PlotInfo] = field(default_factory=list) - removed: List[Path] = field(default_factory=list) + loaded: list[PlotInfo] = field(default_factory=list) + removed: list[Path] = field(default_factory=list) processed: int = 0 remaining: int = 0 duration: float = 0 @@ -99,15 +99,15 @@ class HarvestingMode(IntEnum): GPU = 2 -def 
get_plot_directories(root_path: Path, config: Dict = None) -> List[str]: +def get_plot_directories(root_path: Path, config: dict = None) -> list[str]: if config is None: config = load_config(root_path, "config.yaml") return config["harvester"]["plot_directories"] or [] -def get_plot_filenames(root_path: Path) -> Dict[Path, List[Path]]: +def get_plot_filenames(root_path: Path) -> dict[Path, list[Path]]: # Returns a map from directory to a list of all plots in the directory - all_files: Dict[Path, List[Path]] = {} + all_files: dict[Path, list[Path]] = {} config = load_config(root_path, "config.yaml") recursive_scan: bool = config["harvester"].get("recursive_plot_scan", DEFAULT_RECURSIVE_PLOT_SCAN) for directory_name in get_plot_directories(root_path, config): @@ -120,7 +120,7 @@ def get_plot_filenames(root_path: Path) -> Dict[Path, List[Path]]: return all_files -def add_plot_directory(root_path: Path, str_path: str) -> Dict: +def add_plot_directory(root_path: Path, str_path: str) -> dict: path: Path = Path(str_path).resolve() if not path.exists(): raise ValueError(f"Path doesn't exist: {path}") @@ -140,7 +140,7 @@ def add_plot_directory(root_path: Path, str_path: str) -> Dict: def remove_plot_directory(root_path: Path, str_path: str) -> None: log.debug(f"remove_plot_directory {str_path}") with lock_and_load_config(root_path, "config.yaml") as config: - str_paths: List[str] = get_plot_directories(root_path, config) + str_paths: list[str] = get_plot_directories(root_path, config) # If path str matches exactly, remove if str_path in str_paths: str_paths.remove(str_path) @@ -161,7 +161,7 @@ def remove_plot(path: Path): path.unlink() -def get_harvester_config(root_path: Path) -> Dict[str, Any]: +def get_harvester_config(root_path: Path) -> dict[str, Any]: config = load_config(root_path, "config.yaml") plots_refresh_parameter = ( @@ -219,7 +219,7 @@ def update_harvester_config( save_config(root_path, "config.yaml", config) -def get_filenames(directory: Path, recursive: bool) -> List[Path]: +def get_filenames(directory: Path, recursive: bool) -> list[Path]: try: if not directory.exists(): log.warning(f"Directory: {directory} does not exist.") @@ -227,7 +227,7 @@ def get_filenames(directory: Path, recursive: bool) -> List[Path]: except OSError as e: log.warning(f"Error checking if directory {directory} exists: {e}") return [] - all_files: List[Path] = [] + all_files: list[Path] = [] try: glob_function = directory.rglob if recursive else directory.glob all_files = [child for child in glob_function("*.plot") if child.is_file() and not child.name.startswith("._")] @@ -237,7 +237,7 @@ def get_filenames(directory: Path, recursive: bool) -> List[Path]: return all_files -def parse_plot_info(memo: bytes) -> Tuple[Union[G1Element, bytes32], G1Element, PrivateKey]: +def parse_plot_info(memo: bytes) -> tuple[Union[G1Element, bytes32], G1Element, PrivateKey]: # Parses the plot info bytes into keys if len(memo) == (48 + 48 + 32): # This is a public key memo @@ -286,12 +286,12 @@ def find_duplicate_plot_IDs(all_filenames=None) -> None: all_filenames = [] plot_ids_set = set() duplicate_plot_ids = set() - all_filenames_str: List[str] = [] + all_filenames_str: list[str] = [] for filename in all_filenames: filename_str: str = str(filename) all_filenames_str.append(filename_str) - filename_parts: List[str] = filename_str.split("-") + filename_parts: list[str] = filename_str.split("-") plot_id: str = filename_parts[-1] # Skipped parsing and verifying plot ID for faster performance # Skipped checking K size for faster 
performance @@ -306,7 +306,7 @@ def find_duplicate_plot_IDs(all_filenames=None) -> None: for plot_id in duplicate_plot_ids: log_message: str = plot_id + " found in multiple files:\n" - duplicate_filenames: List[str] = [filename_str for filename_str in all_filenames_str if plot_id in filename_str] + duplicate_filenames: list[str] = [filename_str for filename_str in all_filenames_str if plot_id in filename_str] for filename_str in duplicate_filenames: log_message += "\t" + filename_str + "\n" log.warning(f"{log_message}") diff --git a/chia/pools/pool_config.py b/chia/pools/pool_config.py index 98874df40ec6..6c4523a6ebb2 100644 --- a/chia/pools/pool_config.py +++ b/chia/pools/pool_config.py @@ -3,7 +3,7 @@ import logging from dataclasses import dataclass from pathlib import Path -from typing import Any, Callable, Dict, List +from typing import Any, Callable from chia_rs import G1Element @@ -38,9 +38,9 @@ class PoolWalletConfig(Streamable): owner_public_key: G1Element -def load_pool_config(root_path: Path) -> List[PoolWalletConfig]: +def load_pool_config(root_path: Path) -> list[PoolWalletConfig]: config = load_config(root_path, "config.yaml") - ret_list: List[PoolWalletConfig] = [] + ret_list: list[PoolWalletConfig] = [] pool_list = config["pool"].get("pool_list", []) if pool_list is None: return ret_list @@ -63,7 +63,7 @@ def load_pool_config(root_path: Path) -> List[PoolWalletConfig]: def update_pool_url(root_path: Path, pool_wallet_config: PoolWalletConfig, pool_url: str) -> None: - def update_pool_url_for_entry(config_entry: Dict[str, Any]) -> bool: + def update_pool_url_for_entry(config_entry: dict[str, Any]) -> bool: if config_entry.get("pool_url", "") != pool_url: config_entry["pool_url"] = pool_url @@ -82,7 +82,7 @@ def update_pool_url_for_entry(config_entry: Dict[str, Any]) -> bool: def update_pool_config_entry( root_path: Path, pool_wallet_config: PoolWalletConfig, - update_closure: Callable[[Dict[str, Any]], bool], + update_closure: Callable[[dict[str, Any]], bool], update_log_message: str, ) -> None: with lock_and_load_config(root_path, "config.yaml") as config: @@ -104,7 +104,7 @@ def update_pool_config_entry( save_config(root_path, "config.yaml", config) -async def update_pool_config(root_path: Path, pool_config_list: List[PoolWalletConfig]) -> None: +async def update_pool_config(root_path: Path, pool_config_list: list[PoolWalletConfig]) -> None: with lock_and_load_config(root_path, "config.yaml") as full_config: full_config["pool"]["pool_list"] = [c.to_json_dict() for c in pool_config_list] save_config(root_path, "config.yaml", full_config) diff --git a/chia/pools/pool_puzzles.py b/chia/pools/pool_puzzles.py index 9825ac68243b..d2d6fd242814 100644 --- a/chia/pools/pool_puzzles.py +++ b/chia/pools/pool_puzzles.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import List, Optional, Tuple +from typing import Optional from chia_rs import G1Element from clvm.casts import int_to_bytes @@ -115,7 +115,7 @@ def launcher_id_to_p2_puzzle_hash(launcher_id: bytes32, seconds_delay: uint64, d return create_p2_singleton_puzzle_hash(SINGLETON_MOD_HASH, launcher_id, seconds_delay, delayed_puzzle_hash) -def get_delayed_puz_info_from_launcher_spend(coinsol: CoinSpend) -> Tuple[uint64, bytes32]: +def get_delayed_puz_info_from_launcher_spend(coinsol: CoinSpend) -> tuple[uint64, bytes32]: extra_data = Program.from_bytes(bytes(coinsol.solution)).rest().rest().first() # Extra data is (pool_state delayed_puz_info) # Delayed puz info is (seconds delayed_puzzle_hash) @@ 
-147,7 +147,7 @@ def get_template_singleton_inner_puzzle(inner_puzzle: Program) -> Program:
     return uncurried_inner_puzzle
 
 
-def get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle(puzzle: Program) -> Tuple[uint64, bytes32]:
+def get_seconds_and_delayed_puzhash_from_p2_singleton_puzzle(puzzle: Program) -> tuple[uint64, bytes32]:
     r = puzzle.uncurry()
     if r is None:
         return False
@@ -185,7 +185,7 @@ def create_travel_spend(
     genesis_challenge: bytes32,
     delay_time: uint64,
     delay_ph: bytes32,
-) -> Tuple[CoinSpend, Program]:
+) -> tuple[CoinSpend, Program]:
     inner_puzzle: Program = pool_state_to_inner_puzzle(
         current,
         launcher_coin.name(),
@@ -251,7 +251,7 @@ def create_absorb_spend(
     genesis_challenge: bytes32,
     delay_time: uint64,
     delay_ph: bytes32,
-) -> List[CoinSpend]:
+) -> list[CoinSpend]:
     inner_puzzle: Program = pool_state_to_inner_puzzle(
         current_state, launcher_coin.name(), genesis_challenge, delay_time, delay_ph
     )
@@ -309,7 +309,7 @@ def create_absorb_spend(
 
 
 def get_most_recent_singleton_coin_from_coin_spend(coin_sol: CoinSpend) -> Optional[Coin]:
-    additions: List[Coin] = compute_additions(coin_sol)
+    additions: list[Coin] = compute_additions(coin_sol)
     for coin in additions:
         if coin.amount % 2 == 1:
             return coin
@@ -335,7 +335,7 @@ def get_pubkey_from_member_inner_puzzle(inner_puzzle: Program) -> G1Element:
 
 def uncurry_pool_member_inner_puzzle(
     inner_puzzle: Program,
-) -> Tuple[Program, Program, Program, Program, Program, Program]:
+) -> tuple[Program, Program, Program, Program, Program, Program]:
     """
     Take a puzzle and raise `ValueError` if it's not a "pool member" inner puzzle, or return the uncurried
     `inner_f, target_puzzle_hash, p2_singleton_hash, owner_pubkey, pool_reward_prefix, escape_puzzlehash` if it is.
     """
@@ -352,7 +352,7 @@ def uncurry_pool_member_inner_puzzle(
     return inner_f, target_puzzle_hash, p2_singleton_hash, owner_pubkey, pool_reward_prefix, escape_puzzlehash
 
 
-def uncurry_pool_waitingroom_inner_puzzle(inner_puzzle: Program) -> Tuple[Program, Program, Program, Program]:
+def uncurry_pool_waitingroom_inner_puzzle(inner_puzzle: Program) -> tuple[Program, Program, Program, Program]:
     """
     Take a puzzle and raise `ValueError` if it's not a "pool waiting room" inner puzzle, or return its
     four uncurried arguments if it is.
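The hunks throughout this patch apply one mechanical conversion: since these modules already carry `from __future__ import annotations`, the deprecated `typing.List`/`Dict`/`Tuple`/`Set` aliases can be spelled as the builtin `list`/`dict`/`tuple`/`set`, and abstract container types such as `AsyncIterator`, `Iterable`, and `Awaitable` are imported from `collections.abc` instead of `typing` (PEP 585). A minimal before-and-after sketch of the pattern, using hypothetical names rather than code from this repository:

from __future__ import annotations

from collections.abc import Iterable
from typing import Optional

# Before (pre-PEP 585 style), the same signature would have read:
#   from typing import Dict, Iterable, List, Tuple
#   def spends_by_wallet(records: Iterable[Tuple[int, str]]) -> Dict[int, List[str]]: ...

def spends_by_wallet(records: Iterable[tuple[int, str]]) -> dict[int, list[str]]:
    """Group spend names by wallet id (illustrative only)."""
    grouped: dict[int, list[str]] = {}
    for wallet_id, name in records:
        grouped.setdefault(wallet_id, []).append(name)
    return grouped

def first_spend(records: list[tuple[int, str]]) -> Optional[tuple[int, str]]:
    """`Optional` still comes from `typing`; only the container generics changed."""
    return records[0] if records else None

With the future import in place, annotations are not evaluated at runtime, so the builtin-generic spelling parses even on interpreters older than 3.9; subscripting builtins in executable positions still requires Python 3.9 or later.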
diff --git a/chia/pools/pool_wallet.py b/chia/pools/pool_wallet.py index 554f89c8e73d..4d99f1e3d4de 100644 --- a/chia/pools/pool_wallet.py +++ b/chia/pools/pool_wallet.py @@ -3,7 +3,7 @@ import dataclasses import logging import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import G1Element, G2Element, PrivateKey from typing_extensions import final @@ -80,7 +80,7 @@ class PoolWallet: next_transaction_fee: uint64 = uint64(0) next_tx_config: TXConfig = DEFAULT_TX_CONFIG target_state: Optional[PoolState] = None - _owner_sk_and_index: Optional[Tuple[PrivateKey, uint32]] = None + _owner_sk_and_index: Optional[tuple[PrivateKey, uint32]] = None """ From the user's perspective, this is not a wallet at all, but a way to control @@ -191,12 +191,12 @@ def _verify_initial_target_state(cls, initial_target_state: PoolState) -> None: if err: raise ValueError(f"Invalid internal Pool State: {err}: {initial_target_state}") - async def get_spend_history(self) -> List[Tuple[uint32, CoinSpend]]: + async def get_spend_history(self) -> list[tuple[uint32, CoinSpend]]: return await self.wallet_state_manager.pool_store.get_spends_for_wallet(self.wallet_id) async def get_current_state(self) -> PoolWalletInfo: - history: List[Tuple[uint32, CoinSpend]] = await self.get_spend_history() - all_spends: List[CoinSpend] = [cs for _, cs in history] + history: list[tuple[uint32, CoinSpend]] = await self.get_spend_history() + all_spends: list[CoinSpend] = [cs for _, cs in history] # We must have at least the launcher spend assert len(all_spends) >= 1 @@ -228,16 +228,16 @@ async def get_current_state(self) -> PoolWalletInfo: last_singleton_spend_height, ) - async def get_unconfirmed_transactions(self) -> List[TransactionRecord]: + async def get_unconfirmed_transactions(self) -> list[TransactionRecord]: return await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.wallet_id) - async def get_tip(self) -> Tuple[uint32, CoinSpend]: + async def get_tip(self) -> tuple[uint32, CoinSpend]: return (await self.wallet_state_manager.pool_store.get_spends_for_wallet(self.wallet_id))[-1] async def update_pool_config(self) -> None: current_state: PoolWalletInfo = await self.get_current_state() - pool_config_list: List[PoolWalletConfig] = load_pool_config(self.wallet_state_manager.root_path) - pool_config_dict: Dict[bytes32, PoolWalletConfig] = {c.launcher_id: c for c in pool_config_list} + pool_config_list: list[PoolWalletConfig] = load_pool_config(self.wallet_state_manager.root_path) + pool_config_dict: dict[bytes32, PoolWalletConfig] = {c.launcher_id: c for c in pool_config_list} existing_config: Optional[PoolWalletConfig] = pool_config_dict.get(current_state.launcher_id, None) payout_instructions: str = existing_config.payout_instructions if existing_config is not None else "" @@ -262,7 +262,7 @@ async def apply_state_transition(self, new_state: CoinSpend, block_height: uint3 The DB must be committed after calling this method. All validation should be done here. Returns True iff the spend is a valid transition spend for the singleton, False otherwise. 
""" - tip: Tuple[uint32, CoinSpend] = await self.get_tip() + tip: tuple[uint32, CoinSpend] = await self.get_tip() tip_spend = tip[1] tip_coin: Optional[Coin] = get_most_recent_singleton_coin_from_coin_spend(tip_spend) @@ -270,7 +270,7 @@ async def apply_state_transition(self, new_state: CoinSpend, block_height: uint3 spent_coin_name: bytes32 = tip_coin.name() if spent_coin_name != new_state.coin.name(): - history: List[Tuple[uint32, CoinSpend]] = await self.get_spend_history() + history: list[tuple[uint32, CoinSpend]] = await self.get_spend_history() if new_state.coin.name() in [sp.coin.name() for _, sp in history]: self.log.info(f"Already have state transition: {new_state.coin.name().hex()}") else: @@ -304,7 +304,7 @@ async def rewind(self, block_height: int) -> bool: Returns True if the wallet should be removed. """ try: - history: List[Tuple[uint32, CoinSpend]] = await self.wallet_state_manager.pool_store.get_spends_for_wallet( + history: list[tuple[uint32, CoinSpend]] = await self.wallet_state_manager.pool_store.get_spends_for_wallet( self.wallet_id ) prev_state: PoolWalletInfo = await self.get_current_state() @@ -326,7 +326,7 @@ async def create( wallet_state_manager: Any, wallet: Wallet, launcher_coin_id: bytes32, - block_spends: List[CoinSpend], + block_spends: list[CoinSpend], block_height: uint32, *, name: Optional[str] = None, @@ -391,8 +391,8 @@ async def create_new_pool_wallet_transaction( fee: uint64 = uint64(0), p2_singleton_delay_time: Optional[uint64] = None, p2_singleton_delayed_ph: Optional[bytes32] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Tuple[bytes32, bytes32]: + extra_conditions: tuple[Condition, ...] = tuple(), + ) -> tuple[bytes32, bytes32]: """ A "plot NFT", or pool wallet, represents the idea of a set of plots that all pay to the same pooling puzzle. This puzzle is a `chia singleton` that is @@ -437,7 +437,7 @@ async def create_new_pool_wallet_transaction( return p2_singleton_puzzle_hash, launcher_coin_id - async def _get_owner_key_cache(self) -> Tuple[PrivateKey, uint32]: + async def _get_owner_key_cache(self) -> tuple[PrivateKey, uint32]: if self._owner_sk_and_index is None: self._owner_sk_and_index = find_owner_sk( [self.wallet_state_manager.get_master_private_key()], @@ -453,7 +453,7 @@ async def generate_fee_transaction( self, fee: uint64, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: await self.standard_wallet.generate_signed_transaction( uint64(0), @@ -570,13 +570,13 @@ async def generate_launcher_spend( delay_time: uint64, delay_ph: bytes32, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Tuple[bytes32, bytes32]: + extra_conditions: tuple[Condition, ...] 
= tuple(), + ) -> tuple[bytes32, bytes32]: """ Creates the initial singleton, which includes spending an origin coin, the launcher, and creating a singleton with the "pooling" inner state, which can be either self pooling or using a pool """ - coins: Set[Coin] = await standard_wallet.select_coins(uint64(amount + fee), action_scope) + coins: set[Coin] = await standard_wallet.select_coins(uint64(amount + fee), action_scope) if coins is None: raise ValueError("Not enough coins to create pool wallet") @@ -674,7 +674,7 @@ async def join_pool(self, target_state: PoolState, fee: uint64, action_scope: Wa ) PoolWallet._verify_initial_target_state(target_state) if current_state.current.state == LEAVING_POOL.value: - history: List[Tuple[uint32, CoinSpend]] = await self.get_spend_history() + history: list[tuple[uint32, CoinSpend]] = await self.get_spend_history() last_height: uint32 = history[-1][0] if ( await self.wallet_state_manager.blockchain.get_finished_sync_up_to() @@ -711,7 +711,7 @@ async def self_pool(self, fee: uint64, action_scope: WalletActionScope) -> uint6 if current_state.current.state == LEAVING_POOL.value: total_fee = fee - history: List[Tuple[uint32, CoinSpend]] = await self.get_spend_history() + history: list[tuple[uint32, CoinSpend]] = await self.get_spend_history() last_height: uint32 = history[-1][0] if ( await self.wallet_state_manager.blockchain.get_finished_sync_up_to() @@ -746,21 +746,21 @@ async def claim_pool_rewards( unspent_coin_records = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.wallet_id) if len(unspent_coin_records) == 0: raise ValueError("Nothing to claim, no transactions to p2_singleton_puzzle_hash") - farming_rewards: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_farming_rewards() - coin_to_height_farmed: Dict[Coin, uint32] = {} + farming_rewards: list[TransactionRecord] = await self.wallet_state_manager.tx_store.get_farming_rewards() + coin_to_height_farmed: dict[Coin, uint32] = {} for tx_record in farming_rewards: height_farmed: Optional[uint32] = tx_record.height_farmed( self.wallet_state_manager.constants.GENESIS_CHALLENGE ) assert height_farmed is not None coin_to_height_farmed[tx_record.additions[0]] = height_farmed - history: List[Tuple[uint32, CoinSpend]] = await self.get_spend_history() + history: list[tuple[uint32, CoinSpend]] = await self.get_spend_history() assert len(history) > 0 delayed_seconds, delayed_puzhash = get_delayed_puz_info_from_launcher_spend(history[0][1]) current_state: PoolWalletInfo = await self.get_current_state() last_solution: CoinSpend = history[-1][1] - all_spends: List[CoinSpend] = [] + all_spends: list[CoinSpend] = [] total_amount = 0 # The coins being claimed are gathered into the `SpendBundle`, :absorb_spend: @@ -778,7 +778,7 @@ async def claim_pool_rewards( self.log.info(f"pool wallet truncating absorb to {max_spends_in_tx} spends to fit into block") print(f"pool wallet truncating absorb to {max_spends_in_tx} spends to fit into block") break - absorb_spend: List[CoinSpend] = create_absorb_spend( + absorb_spend: list[CoinSpend] = create_absorb_spend( last_solution, current_state.current, current_state.launcher_coin, @@ -853,7 +853,7 @@ async def new_peak(self, peak_height: uint32) -> None: # Add some buffer (+2) to reduce chances of a reorg if peak_height > leave_height + 2: - unconfirmed: List[TransactionRecord] = ( + unconfirmed: list[TransactionRecord] = ( await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet(self.wallet_id) ) next_tip: Optional[Coin] 
= get_most_recent_singleton_coin_from_coin_spend(tip_spend) @@ -879,7 +879,7 @@ async def new_peak(self, peak_height: uint32) -> None: await self.generate_travel_transactions(self.next_transaction_fee, action_scope) async def have_unconfirmed_transaction(self) -> bool: - unconfirmed: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet( + unconfirmed: list[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet( self.wallet_id ) return len(unconfirmed) > 0 @@ -887,7 +887,7 @@ async def have_unconfirmed_transaction(self) -> bool: async def get_confirmed_balance(self, _: Optional[object] = None) -> uint128: amount: uint128 = uint128(0) if (await self.get_current_state()).current.state == SELF_POOLING.value: - unspent_coin_records: List[WalletCoinRecord] = list( + unspent_coin_records: list[WalletCoinRecord] = list( await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.wallet_id) ) for record in unspent_coin_records: @@ -904,13 +904,13 @@ async def get_spendable_balance(self, record_list: Optional[object] = None) -> u async def get_pending_change_balance(self) -> uint64: return uint64(0) - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection, coin_data: Optional[object]) -> None: pass - async def select_coins(self, amount: uint64, action_scope: WalletActionScope) -> Set[Coin]: + async def select_coins(self, amount: uint64, action_scope: WalletActionScope) -> set[Coin]: raise RuntimeError("PoolWallet does not support select_coins()") def require_derivation_paths(self) -> bool: diff --git a/chia/pools/pool_wallet_info.py b/chia/pools/pool_wallet_info.py index f42580f08f37..e7b25c78c0ac 100644 --- a/chia/pools/pool_wallet_info.py +++ b/chia/pools/pool_wallet_info.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import IntEnum -from typing import Any, Dict, Optional +from typing import Any, Optional from chia_rs import G1Element @@ -62,7 +62,7 @@ class PoolState(Streamable): def initial_pool_state_from_dict( - state_dict: Dict[str, Any], + state_dict: dict[str, Any], owner_pubkey: G1Element, owner_puzzle_hash: bytes32, ) -> PoolState: diff --git a/chia/protocols/full_node_protocol.py b/chia/protocols/full_node_protocol.py index 4a3607dfcb81..e8d3b2f56c70 100644 --- a/chia/protocols/full_node_protocol.py +++ b/chia/protocols/full_node_protocol.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.blockchain_format.vdf import VDFInfo, VDFProof @@ -90,7 +90,7 @@ class RequestBlocks(Streamable): class RespondBlocks(Streamable): start_height: uint32 end_height: uint32 - blocks: List[FullBlock] + blocks: list[FullBlock] @streamable @@ -216,4 +216,4 @@ class RequestPeers(Streamable): @streamable @dataclass(frozen=True) class RespondPeers(Streamable): - peer_list: List[TimestampedPeerInfo] + peer_list: list[TimestampedPeerInfo] diff --git a/chia/protocols/harvester_protocol.py b/chia/protocols/harvester_protocol.py index bf820da33d2d..93bf4bba7b61 100644 --- a/chia/protocols/harvester_protocol.py +++ b/chia/protocols/harvester_protocol.py @@ -2,7 +2,7 @@ from dataclasses import dataclass 
from enum import IntEnum -from typing import List, Optional, Tuple +from typing import Optional from chia_rs import G1Element, G2Element @@ -29,8 +29,8 @@ class PoolDifficulty(Streamable): @streamable @dataclass(frozen=True) class HarvesterHandshake(Streamable): - farmer_public_keys: List[G1Element] - pool_public_keys: List[G1Element] + farmer_public_keys: list[G1Element] + pool_public_keys: list[G1Element] @streamable @@ -41,7 +41,7 @@ class NewSignagePointHarvester(Streamable): sub_slot_iters: uint64 signage_point_index: uint8 sp_hash: bytes32 - pool_difficulties: List[PoolDifficulty] + pool_difficulties: list[PoolDifficulty] filter_prefix_bits: uint8 @@ -90,9 +90,9 @@ class RequestSignatures(Streamable): plot_identifier: str challenge_hash: bytes32 sp_hash: bytes32 - messages: List[bytes32] + messages: list[bytes32] # This, and rc_block_unfinished are only set when using a third-party harvester (see CHIP-22) - message_data: Optional[List[Optional[SignatureRequestSourceData]]] + message_data: Optional[list[Optional[SignatureRequestSourceData]]] rc_block_unfinished: Optional[RewardChainBlockUnfinished] @@ -104,7 +104,7 @@ class RespondSignatures(Streamable): sp_hash: bytes32 local_pk: G1Element farmer_pk: G1Element - message_signatures: List[Tuple[bytes32, G2Element]] + message_signatures: list[tuple[bytes32, G2Element]] include_source_signature_data: bool farmer_reward_address_override: Optional[bytes32] @@ -132,9 +132,9 @@ class RequestPlots(Streamable): @streamable @dataclass(frozen=True) class RespondPlots(Streamable): - plots: List[Plot] - failed_to_open_filenames: List[str] - no_key_filenames: List[str] + plots: list[Plot] + failed_to_open_filenames: list[str] + no_key_filenames: list[str] @streamable @@ -166,7 +166,7 @@ def __str__(self) -> str: @dataclass(frozen=True) class PlotSyncPathList(Streamable): identifier: PlotSyncIdentifier - data: List[str] + data: list[str] final: bool def __str__(self) -> str: @@ -177,7 +177,7 @@ def __str__(self) -> str: @dataclass(frozen=True) class PlotSyncPlotList(Streamable): identifier: PlotSyncIdentifier - data: List[Plot] + data: list[Plot] final: bool def __str__(self) -> str: diff --git a/chia/protocols/introducer_protocol.py b/chia/protocols/introducer_protocol.py index 2247bcb2408b..377a6e212c4f 100644 --- a/chia/protocols/introducer_protocol.py +++ b/chia/protocols/introducer_protocol.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.types.peer_info import TimestampedPeerInfo from chia.util.streamable import Streamable, streamable @@ -23,4 +22,4 @@ class RequestPeersIntroducer(Streamable): @streamable @dataclass(frozen=True) class RespondPeersIntroducer(Streamable): - peer_list: List[TimestampedPeerInfo] + peer_list: list[TimestampedPeerInfo] diff --git a/chia/protocols/shared_protocol.py b/chia/protocols/shared_protocol.py index a2ad7419a62c..69fe8e8881e3 100644 --- a/chia/protocols/shared_protocol.py +++ b/chia/protocols/shared_protocol.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import IntEnum -from typing import List, Optional, Tuple +from typing import Optional from chia.server.outbound_message import NodeType from chia.util.ints import int16, uint8, uint16 @@ -47,7 +47,7 @@ class Capability(IntEnum): # These are the default capabilities used in all outgoing handshakes. # "1" means the capability is supported and enabled. 
-_capabilities: List[Tuple[uint16, str]] = [ +_capabilities: list[tuple[uint16, str]] = [ (uint16(Capability.BASE.value), "1"), (uint16(Capability.BLOCK_HEADERS.value), "1"), (uint16(Capability.RATE_LIMITS_V2.value), "1"), @@ -75,7 +75,7 @@ class Handshake(Streamable): software_version: str server_port: uint16 node_type: uint8 - capabilities: List[Tuple[uint16, str]] + capabilities: list[tuple[uint16, str]] @streamable diff --git a/chia/protocols/timelord_protocol.py b/chia/protocols/timelord_protocol.py index 48dbfab4086e..55c3c2da055a 100644 --- a/chia/protocols/timelord_protocol.py +++ b/chia/protocols/timelord_protocol.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional, Tuple +from typing import Optional from chia.types.blockchain_format.foliage import Foliage from chia.types.blockchain_format.reward_chain_block import RewardChainBlock, RewardChainBlockUnfinished @@ -28,7 +28,7 @@ class NewPeakTimelord(Streamable): sub_epoch_summary: Optional[ SubEpochSummary ] # If NewPeak is the last slot in epoch, the next slot should include this - previous_reward_challenges: List[Tuple[bytes32, uint128]] + previous_reward_challenges: list[tuple[bytes32, uint128]] last_challenge_sb_or_eos_total_iters: uint128 passes_ses_height_but_not_yet_included: bool diff --git a/chia/protocols/wallet_protocol.py b/chia/protocols/wallet_protocol.py index 22a44ca26f04..d6a55d61473a 100644 --- a/chia/protocols/wallet_protocol.py +++ b/chia/protocols/wallet_protocol.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import IntEnum -from typing import List, Optional, Tuple +from typing import Optional import chia_rs @@ -100,7 +100,7 @@ class RejectHeaderRequest(Streamable): class RequestRemovals(Streamable): height: uint32 header_hash: bytes32 - coin_names: Optional[List[bytes32]] + coin_names: Optional[list[bytes32]] @streamable @@ -108,8 +108,8 @@ class RequestRemovals(Streamable): class RespondRemovals(Streamable): height: uint32 header_hash: bytes32 - coins: List[Tuple[bytes32, Optional[Coin]]] - proofs: Optional[List[Tuple[bytes32, bytes]]] + coins: list[tuple[bytes32, Optional[Coin]]] + proofs: Optional[list[tuple[bytes32, bytes]]] @streamable @@ -124,7 +124,7 @@ class RejectRemovalsRequest(Streamable): class RequestAdditions(Streamable): height: uint32 header_hash: Optional[bytes32] - puzzle_hashes: Optional[List[bytes32]] + puzzle_hashes: Optional[list[bytes32]] @streamable @@ -132,8 +132,8 @@ class RequestAdditions(Streamable): class RespondAdditions(Streamable): height: uint32 header_hash: bytes32 - coins: List[Tuple[bytes32, List[Coin]]] - proofs: Optional[List[Tuple[bytes32, bytes, Optional[bytes]]]] + coins: list[tuple[bytes32, list[Coin]]] + proofs: Optional[list[tuple[bytes32, bytes, Optional[bytes]]]] @streamable @@ -148,7 +148,7 @@ class RejectAdditionsRequest(Streamable): class RespondBlockHeaders(Streamable): start_height: uint32 end_height: uint32 - header_blocks: List[HeaderBlock] + header_blocks: list[HeaderBlock] @streamable @@ -185,7 +185,7 @@ class RejectHeaderBlocks(Streamable): class RespondHeaderBlocks(Streamable): start_height: uint32 end_height: uint32 - header_blocks: List[HeaderBlock] + header_blocks: list[HeaderBlock] # This class is implemented in Rust @@ -200,7 +200,7 @@ class RespondHeaderBlocks(Streamable): @streamable @dataclass(frozen=True) class RegisterForPhUpdates(Streamable): - puzzle_hashes: List[bytes32] + puzzle_hashes: list[bytes32] min_height: uint32 @@ -216,16 +216,16 @@ class 
RegisterForPhUpdates(Streamable): @streamable @dataclass(frozen=True) class RegisterForCoinUpdates(Streamable): - coin_ids: List[bytes32] + coin_ids: list[bytes32] min_height: uint32 @streamable @dataclass(frozen=True) class RespondToCoinUpdates(Streamable): - coin_ids: List[bytes32] + coin_ids: list[bytes32] min_height: uint32 - coin_states: List[CoinState] + coin_states: list[CoinState] @streamable @@ -234,7 +234,7 @@ class CoinStateUpdate(Streamable): height: uint32 fork_height: uint32 peak_hash: bytes32 - items: List[CoinState] + items: list[CoinState] @streamable @@ -246,7 +246,7 @@ class RequestChildren(Streamable): @streamable @dataclass(frozen=True) class RespondChildren(Streamable): - coin_states: List[CoinState] + coin_states: list[CoinState] @streamable @@ -259,8 +259,8 @@ class RequestSESInfo(Streamable): @streamable @dataclass(frozen=True) class RespondSESInfo(Streamable): - reward_chain_hash: List[bytes32] - heights: List[List[uint32]] + reward_chain_hash: list[bytes32] + heights: list[list[uint32]] @streamable @@ -270,7 +270,7 @@ class RequestFeeEstimates(Streamable): time_targets (List[uint64]): Epoch timestamps in seconds to estimate FeeRates for. """ - time_targets: List[uint64] + time_targets: list[uint64] @streamable @@ -282,25 +282,25 @@ class RespondFeeEstimates(Streamable): @streamable @dataclass(frozen=True) class RequestRemovePuzzleSubscriptions(Streamable): - puzzle_hashes: Optional[List[bytes32]] + puzzle_hashes: Optional[list[bytes32]] @streamable @dataclass(frozen=True) class RespondRemovePuzzleSubscriptions(Streamable): - puzzle_hashes: List[bytes32] + puzzle_hashes: list[bytes32] @streamable @dataclass(frozen=True) class RequestRemoveCoinSubscriptions(Streamable): - coin_ids: Optional[List[bytes32]] + coin_ids: Optional[list[bytes32]] @streamable @dataclass(frozen=True) class RespondRemoveCoinSubscriptions(Streamable): - coin_ids: List[bytes32] + coin_ids: list[bytes32] @streamable @@ -315,7 +315,7 @@ class CoinStateFilters(Streamable): @streamable @dataclass(frozen=True) class RequestPuzzleState(Streamable): - puzzle_hashes: List[bytes32] + puzzle_hashes: list[bytes32] previous_height: Optional[uint32] header_hash: bytes32 filters: CoinStateFilters @@ -325,11 +325,11 @@ class RequestPuzzleState(Streamable): @streamable @dataclass(frozen=True) class RespondPuzzleState(Streamable): - puzzle_hashes: List[bytes32] + puzzle_hashes: list[bytes32] height: uint32 header_hash: bytes32 is_finished: bool - coin_states: List[CoinState] + coin_states: list[CoinState] @streamable @@ -341,7 +341,7 @@ class RejectPuzzleState(Streamable): @streamable @dataclass(frozen=True) class RequestCoinState(Streamable): - coin_ids: List[bytes32] + coin_ids: list[bytes32] previous_height: Optional[uint32] header_hash: bytes32 subscribe: bool @@ -350,8 +350,8 @@ class RequestCoinState(Streamable): @streamable @dataclass(frozen=True) class RespondCoinState(Streamable): - coin_ids: List[bytes32] - coin_states: List[CoinState] + coin_ids: list[bytes32] + coin_states: list[CoinState] @streamable @@ -375,13 +375,13 @@ class RemovedMempoolItem(Streamable): @streamable @dataclass(frozen=True) class MempoolItemsAdded(Streamable): - transaction_ids: List[bytes32] + transaction_ids: list[bytes32] @streamable @dataclass(frozen=True) class MempoolItemsRemoved(Streamable): - removed_items: List[RemovedMempoolItem] + removed_items: list[RemovedMempoolItem] @streamable diff --git a/chia/rpc/crawler_rpc_api.py b/chia/rpc/crawler_rpc_api.py index b4122f0f1957..3d7ff0aee842 100644 --- 
a/chia/rpc/crawler_rpc_api.py +++ b/chia/rpc/crawler_rpc_api.py @@ -1,7 +1,7 @@ from __future__ import annotations import ipaddress -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.rpc.rpc_server import Endpoint, EndpointResult from chia.seeder.crawler import Crawler @@ -13,13 +13,13 @@ def __init__(self, crawler: Crawler): self.service = crawler self.service_name = "chia_crawler" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { "/get_peer_counts": self.get_peer_counts, "/get_ips_after_timestamp": self.get_ips_after_timestamp, } - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> list[WsRpcMessage]: payloads = [] if change_data is None: @@ -30,7 +30,7 @@ async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any] return payloads - async def get_peer_counts(self, _request: Dict[str, Any]) -> EndpointResult: + async def get_peer_counts(self, _request: dict[str, Any]) -> EndpointResult: ipv6_addresses_count = 0 for host in self.service.best_timestamp_per_peer.keys(): try: @@ -54,7 +54,7 @@ async def get_peer_counts(self, _request: Dict[str, Any]) -> EndpointResult: } return data - async def get_ips_after_timestamp(self, _request: Dict[str, Any]) -> EndpointResult: + async def get_ips_after_timestamp(self, _request: dict[str, Any]) -> EndpointResult: after = _request.get("after", None) if after is None: raise ValueError("`after` is required and must be a unix timestamp") @@ -62,7 +62,7 @@ async def get_ips_after_timestamp(self, _request: Dict[str, Any]) -> EndpointRes offset = _request.get("offset", 0) limit = _request.get("limit", 10000) - matched_ips: List[str] = [] + matched_ips: list[str] = [] for ip, timestamp in self.service.best_timestamp_per_peer.items(): if timestamp > after: matched_ips.append(ip) diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py index e62faf7dc69e..97bdf95b1848 100644 --- a/chia/rpc/data_layer_rpc_api.py +++ b/chia/rpc/data_layer_rpc_api.py @@ -2,7 +2,7 @@ import dataclasses from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, Optional, Union, cast from chia.data_layer.data_layer_errors import OfferIntegrityError from chia.data_layer.data_layer_util import ( @@ -44,7 +44,7 @@ from chia.data_layer.data_layer import DataLayer -def process_change(change: Dict[str, Any]) -> Dict[str, Any]: +def process_change(change: dict[str, Any]) -> dict[str, Any]: # TODO: A full class would likely be nice for this so downstream doesn't # have to deal with maybe-present attributes or Dict[str, Any] hints. 
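The RPC API hunks above and below all converge on the same endpoint shape after the rewrite: get_routes returns a dict mapping URL paths to handlers, and each handler takes the decoded JSON request as a plain dict. A minimal sketch of that convention, assuming Endpoint and EndpointResult from chia.rpc.rpc_server behave as the callable and dict aliases these signatures imply (ExampleRpcApi itself is hypothetical):

from __future__ import annotations

from typing import Any

from chia.rpc.rpc_server import Endpoint, EndpointResult


class ExampleRpcApi:  # hypothetical, mirrors CrawlerRpcApi/DataLayerRpcApi above
    def get_routes(self) -> dict[str, Endpoint]:
        return {"/get_example": self.get_example}

    async def get_example(self, request: dict[str, Any]) -> EndpointResult:
        # Handlers receive the decoded JSON body and return a JSON-able dict.
        return {"echo": request.get("value")}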
reference_node_hash = change.get("reference_node_hash") @@ -68,20 +68,20 @@ def process_change(change: Dict[str, Any]) -> Dict[str, Any]: } -def process_change_multistore(update: Dict[str, Any]) -> Dict[str, Any]: +def process_change_multistore(update: dict[str, Any]) -> dict[str, Any]: store_id = update.get("store_id") if store_id is None: raise Exception("Each update must specify a store_id") changelist = update.get("changelist") if changelist is None: raise Exception("Each update must specify a changelist") - res: Dict[str, Any] = {} + res: dict[str, Any] = {} res["store_id"] = bytes32.from_hexstr(store_id) res["changelist"] = [process_change(change) for change in changelist] return res -def get_fee(config: Dict[str, Any], request: Dict[str, Any]) -> uint64: +def get_fee(config: dict[str, Any], request: dict[str, Any]) -> uint64: fee = request.get("fee") if fee is None: fee = 0 # DL no longer reads the fee from the config @@ -94,7 +94,7 @@ def __init__(self, data_layer: DataLayer): # , wallet: DataLayerWallet): self.service: DataLayer = data_layer self.service_name = "chia_data_layer" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { "/wallet_log_in": self.wallet_log_in, "/create_data_store": self.create_data_store, @@ -133,17 +133,17 @@ def get_routes(self) -> Dict[str, Endpoint]: "/verify_proof": self.verify_proof, } - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]]) -> list[WsRpcMessage]: return [] - async def wallet_log_in(self, request: Dict[str, Any]) -> EndpointResult: + async def wallet_log_in(self, request: dict[str, Any]) -> EndpointResult: if self.service is None: raise Exception("Data layer not created") fingerprint = cast(int, request["fingerprint"]) await self.service.wallet_log_in(fingerprint=fingerprint) return {} - async def create_data_store(self, request: Dict[str, Any]) -> EndpointResult: + async def create_data_store(self, request: dict[str, Any]) -> EndpointResult: if self.service is None: raise Exception("Data layer not created") fee = get_fee(self.service.config, request) @@ -154,13 +154,13 @@ async def create_data_store(self, request: Dict[str, Any]) -> EndpointResult: else: return {"id": value.hex()} - async def get_owned_stores(self, request: Dict[str, Any]) -> EndpointResult: + async def get_owned_stores(self, request: dict[str, Any]) -> EndpointResult: if self.service is None: raise Exception("Data layer not created") singleton_records = await self.service.get_owned_stores() return {"store_ids": [singleton.launcher_id.hex() for singleton in singleton_records]} - async def get_value(self, request: Dict[str, Any]) -> EndpointResult: + async def get_value(self, request: dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) key = hexstr_to_bytes(request["key"]) # NOTE: being outside the rpc, this retains the none-means-unspecified semantics @@ -178,7 +178,7 @@ async def get_value(self, request: Dict[str, Any]) -> EndpointResult: hex = value.hex() return {"value": hex} - async def get_keys(self, request: Dict[str, Any]) -> EndpointResult: + async def get_keys(self, request: dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) # NOTE: being outside the rpc, this retains the none-means-unspecified semantics root_hash: Optional[str] = request.get("root_hash") @@ -215,7 +215,7 @@ async def get_keys(self, request: 
Dict[str, Any]) -> EndpointResult: return response - async def get_keys_values(self, request: Dict[str, Any]) -> EndpointResult: + async def get_keys_values(self, request: dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) # NOTE: being outside the rpc, this retains the none-means-unspecified semantics root_hash: Optional[str] = request.get("root_hash") @@ -255,7 +255,7 @@ async def get_keys_values(self, request: Dict[str, Any]) -> EndpointResult: return response - async def get_ancestors(self, request: Dict[str, Any]) -> EndpointResult: + async def get_ancestors(self, request: dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) node_hash = bytes32.from_hexstr(request["hash"]) if self.service is None: @@ -263,7 +263,7 @@ async def get_ancestors(self, request: Dict[str, Any]) -> EndpointResult: value = await self.service.get_ancestors(node_hash, store_id) return {"ancestors": value} - async def batch_update(self, request: Dict[str, Any]) -> EndpointResult: + async def batch_update(self, request: dict[str, Any]) -> EndpointResult: """ id - the id of the store we are operating on changelist - a list of changes to apply on store @@ -285,7 +285,7 @@ async def batch_update(self, request: Dict[str, Any]) -> EndpointResult: raise Exception("Transaction submitted on chain, but submit_on_chain set to False") return {} - async def multistore_batch_update(self, request: Dict[str, Any]) -> EndpointResult: + async def multistore_batch_update(self, request: dict[str, Any]) -> EndpointResult: fee = get_fee(self.service.config, request) store_updates = [process_change_multistore(update) for update in request["store_updates"]] submit_on_chain = request.get("submit_on_chain", True) @@ -301,18 +301,18 @@ async def multistore_batch_update(self, request: Dict[str, Any]) -> EndpointResu raise Exception("Transaction submitted on chain, but submit_on_chain set to False") return {} - async def submit_pending_root(self, request: Dict[str, Any]) -> EndpointResult: + async def submit_pending_root(self, request: dict[str, Any]) -> EndpointResult: store_id = bytes32.from_hexstr(request["id"]) fee = get_fee(self.service.config, request) transaction_record = await self.service.submit_pending_root(store_id, uint64(fee)) return {"tx_id": transaction_record.name} - async def submit_all_pending_roots(self, request: Dict[str, Any]) -> EndpointResult: + async def submit_all_pending_roots(self, request: dict[str, Any]) -> EndpointResult: fee = get_fee(self.service.config, request) transaction_records = await self.service.submit_all_pending_roots(uint64(fee)) return {"tx_id": [transaction_record.name for transaction_record in transaction_records]} - async def insert(self, request: Dict[str, Any]) -> EndpointResult: + async def insert(self, request: dict[str, Any]) -> EndpointResult: """ rows_to_add a list of clvm objects as bytes to add to table rows_to_remove a list of row hashes to remove @@ -329,7 +329,7 @@ async def insert(self, request: Dict[str, Any]) -> EndpointResult: assert transaction_record is not None return {"tx_id": transaction_record.name} - async def delete_key(self, request: Dict[str, Any]) -> EndpointResult: + async def delete_key(self, request: dict[str, Any]) -> EndpointResult: """ rows_to_add a list of clvm objects as bytes to add to table rows_to_remove a list of row hashes to remove @@ -345,7 +345,7 @@ async def delete_key(self, request: Dict[str, Any]) -> EndpointResult: assert transaction_record is not None return {"tx_id": 
transaction_record.name} - async def get_root(self, request: Dict[str, Any]) -> EndpointResult: + async def get_root(self, request: dict[str, Any]) -> EndpointResult: """get hash of latest tree root""" store_id = bytes32.from_hexstr(request["id"]) # todo input checks @@ -356,7 +356,7 @@ async def get_root(self, request: Dict[str, Any]) -> EndpointResult: raise Exception(f"Failed to get root for {store_id.hex()}") return {"hash": rec.root, "confirmed": rec.confirmed, "timestamp": rec.timestamp} - async def get_local_root(self, request: Dict[str, Any]) -> EndpointResult: + async def get_local_root(self, request: dict[str, Any]) -> EndpointResult: """get hash of latest tree root saved in our local datastore""" store_id = bytes32.from_hexstr(request["id"]) # todo input checks @@ -365,7 +365,7 @@ async def get_local_root(self, request: Dict[str, Any]) -> EndpointResult: res = await self.service.get_local_root(store_id) return {"hash": res} - async def get_roots(self, request: Dict[str, Any]) -> EndpointResult: + async def get_roots(self, request: dict[str, Any]) -> EndpointResult: """ get state hashes for a list of roots """ @@ -381,7 +381,7 @@ async def get_roots(self, request: Dict[str, Any]) -> EndpointResult: roots.append({"id": id_bytes, "hash": rec.root, "confirmed": rec.confirmed, "timestamp": rec.timestamp}) return {"root_hashes": roots} - async def subscribe(self, request: Dict[str, Any]) -> EndpointResult: + async def subscribe(self, request: dict[str, Any]) -> EndpointResult: """ subscribe to singleton """ @@ -396,7 +396,7 @@ async def subscribe(self, request: Dict[str, Any]) -> EndpointResult: await self.service.subscribe(store_id=store_id_bytes, urls=urls) return {} - async def unsubscribe(self, request: Dict[str, Any]) -> EndpointResult: + async def unsubscribe(self, request: dict[str, Any]) -> EndpointResult: """ unsubscribe from singleton """ @@ -410,16 +410,16 @@ async def unsubscribe(self, request: Dict[str, Any]) -> EndpointResult: await self.service.unsubscribe(store_id_bytes, retain_data) return {} - async def subscriptions(self, request: Dict[str, Any]) -> EndpointResult: + async def subscriptions(self, request: dict[str, Any]) -> EndpointResult: """ List current subscriptions """ if self.service is None: raise Exception("Data layer not created") - subscriptions: List[Subscription] = await self.service.get_subscriptions() + subscriptions: list[Subscription] = await self.service.get_subscriptions() return {"store_ids": [sub.store_id.hex() for sub in subscriptions]} - async def remove_subscriptions(self, request: Dict[str, Any]) -> EndpointResult: + async def remove_subscriptions(self, request: dict[str, Any]) -> EndpointResult: if self.service is None: raise Exception("Data layer not created") store_id = request.get("id") @@ -430,7 +430,7 @@ async def remove_subscriptions(self, request: Dict[str, Any]) -> EndpointResult: await self.service.remove_subscriptions(store_id=store_id_bytes, urls=urls) return {} - async def add_missing_files(self, request: Dict[str, Any]) -> EndpointResult: + async def add_missing_files(self, request: dict[str, Any]) -> EndpointResult: """ complete the data server files. 
""" @@ -438,7 +438,7 @@ async def add_missing_files(self, request: Dict[str, Any]) -> EndpointResult: store_ids = request["ids"] ids_bytes = [bytes32.from_hexstr(id) for id in store_ids] else: - subscriptions: List[Subscription] = await self.service.get_subscriptions() + subscriptions: list[Subscription] = await self.service.get_subscriptions() ids_bytes = [subscription.store_id for subscription in subscriptions] overwrite = request.get("overwrite", False) foldername: Optional[Path] = None @@ -448,7 +448,7 @@ async def add_missing_files(self, request: Dict[str, Any]) -> EndpointResult: await self.service.add_missing_files(store_id, overwrite, foldername) return {} - async def get_root_history(self, request: Dict[str, Any]) -> EndpointResult: + async def get_root_history(self, request: dict[str, Any]) -> EndpointResult: """ get history of state hashes for a store """ @@ -457,12 +457,12 @@ async def get_root_history(self, request: Dict[str, Any]) -> EndpointResult: store_id = request["id"] id_bytes = bytes32.from_hexstr(store_id) records = await self.service.get_root_history(id_bytes) - res: List[Dict[str, Any]] = [] + res: list[dict[str, Any]] = [] for rec in records: res.insert(0, {"root_hash": rec.root, "confirmed": rec.confirmed, "timestamp": rec.timestamp}) return {"root_history": res} - async def get_kv_diff(self, request: Dict[str, Any]) -> EndpointResult: + async def get_kv_diff(self, request: dict[str, Any]) -> EndpointResult: """ get kv diff between two root hashes """ @@ -476,7 +476,7 @@ async def get_kv_diff(self, request: Dict[str, Any]) -> EndpointResult: hash_2_bytes = bytes32.from_hexstr(hash_2) page = request.get("page", None) max_page_size = request.get("max_page_size", None) - res: List[Dict[str, Any]] = [] + res: list[dict[str, Any]] = [] if page is None: records_dict = await self.service.get_kv_diff(id_bytes, hash_1_bytes, hash_2_bytes) @@ -501,7 +501,7 @@ async def get_kv_diff(self, request: Dict[str, Any]) -> EndpointResult: return response - async def add_mirror(self, request: Dict[str, Any]) -> EndpointResult: + async def add_mirror(self, request: dict[str, Any]) -> EndpointResult: store_id = request["id"] id_bytes = bytes32.from_hexstr(store_id) urls = request["urls"] @@ -510,17 +510,17 @@ async def add_mirror(self, request: Dict[str, Any]) -> EndpointResult: await self.service.add_mirror(id_bytes, urls, amount, fee) return {} - async def delete_mirror(self, request: Dict[str, Any]) -> EndpointResult: + async def delete_mirror(self, request: dict[str, Any]) -> EndpointResult: coin_id = request["coin_id"] coin_id_bytes = bytes32.from_hexstr(coin_id) fee = get_fee(self.service.config, request) await self.service.delete_mirror(coin_id_bytes, fee) return {} - async def get_mirrors(self, request: Dict[str, Any]) -> EndpointResult: + async def get_mirrors(self, request: dict[str, Any]) -> EndpointResult: store_id = request["id"] id_bytes = bytes32.from_hexstr(store_id) - mirrors: List[Mirror] = await self.service.get_mirrors(id_bytes) + mirrors: list[Mirror] = await self.service.get_mirrors(id_bytes) return {"mirrors": [mirror.to_json_dict() for mirror in mirrors]} @marshal() # type: ignore[arg-type] @@ -566,7 +566,7 @@ async def cancel_offer(self, request: CancelOfferRequest) -> CancelOfferResponse return CancelOfferResponse(success=True) - async def get_sync_status(self, request: Dict[str, Any]) -> EndpointResult: + async def get_sync_status(self, request: dict[str, Any]) -> EndpointResult: store_id = request["id"] id_bytes = bytes32.from_hexstr(store_id) if 
self.service is None: @@ -582,7 +582,7 @@ async def get_sync_status(self, request: Dict[str, Any]) -> EndpointResult: } } - async def check_plugins(self, request: Dict[str, Any]) -> EndpointResult: + async def check_plugins(self, request: dict[str, Any]) -> EndpointResult: if self.service is None: raise Exception("Data layer not created") plugin_status = await self.service.check_plugins() @@ -601,7 +601,7 @@ async def get_proof(self, request: GetProofRequest) -> GetProofResponse: if root is None: raise ValueError("no root") - all_proofs: List[HashOnlyProof] = [] + all_proofs: list[HashOnlyProof] = [] for key in request.keys: node = await self.service.data_store.get_node_by_key(store_id=request.store_id, key=key) pi = await self.service.data_store.get_proof_of_inclusion_by_hash( diff --git a/chia/rpc/data_layer_rpc_client.py b/chia/rpc/data_layer_rpc_client.py index 31fd1617c3d1..a7aa61fff472 100644 --- a/chia/rpc/data_layer_rpc_client.py +++ b/chia/rpc/data_layer_rpc_client.py @@ -1,7 +1,7 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.data_layer.data_layer_util import ClearPendingRootsRequest from chia.rpc.rpc_client import RpcClient @@ -10,25 +10,25 @@ class DataLayerRpcClient(RpcClient): - async def create_data_store(self, fee: Optional[uint64], verbose: bool) -> Dict[str, Any]: + async def create_data_store(self, fee: Optional[uint64], verbose: bool) -> dict[str, Any]: response = await self.fetch("create_data_store", {"fee": fee, "verbose": verbose}) return response - async def wallet_log_in(self, fingerprint: int) -> Dict[str, Any]: - request: Dict[str, Any] = {"fingerprint": fingerprint} + async def wallet_log_in(self, fingerprint: int) -> dict[str, Any]: + request: dict[str, Any] = {"fingerprint": fingerprint} response = await self.fetch("wallet_log_in", request) return response - async def get_value(self, store_id: bytes32, key: bytes, root_hash: Optional[bytes32]) -> Dict[str, Any]: - request: Dict[str, Any] = {"id": store_id.hex(), "key": key.hex()} + async def get_value(self, store_id: bytes32, key: bytes, root_hash: Optional[bytes32]) -> dict[str, Any]: + request: dict[str, Any] = {"id": store_id.hex(), "key": key.hex()} if root_hash is not None: request["root_hash"] = root_hash.hex() response = await self.fetch("get_value", request) return response async def update_data_store( - self, store_id: bytes32, changelist: List[Dict[str, str]], fee: Optional[uint64], submit_on_chain: bool = True - ) -> Dict[str, Any]: + self, store_id: bytes32, changelist: list[dict[str, str]], fee: Optional[uint64], submit_on_chain: bool = True + ) -> dict[str, Any]: response = await self.fetch( "batch_update", { @@ -41,8 +41,8 @@ async def update_data_store( return response async def update_multiple_stores( - self, store_updates: List[Dict[str, Any]], fee: Optional[uint64], submit_on_chain: bool = True - ) -> Dict[str, Any]: + self, store_updates: list[dict[str, Any]], fee: Optional[uint64], submit_on_chain: bool = True + ) -> dict[str, Any]: response = await self.fetch( "multistore_batch_update", { @@ -53,18 +53,18 @@ async def update_multiple_stores( ) return response - async def submit_pending_root(self, store_id: bytes32, fee: Optional[uint64]) -> Dict[str, Any]: + async def submit_pending_root(self, store_id: bytes32, fee: Optional[uint64]) -> dict[str, Any]: response = await self.fetch("submit_pending_root", {"id": store_id.hex(), "fee": fee}) return response - async def 
submit_all_pending_roots(self, fee: Optional[uint64]) -> Dict[str, Any]: + async def submit_all_pending_roots(self, fee: Optional[uint64]) -> dict[str, Any]: response = await self.fetch("submit_all_pending_roots", {"fee": fee}) return response async def get_keys_values( self, store_id: bytes32, root_hash: Optional[bytes32], page: Optional[int], max_page_size: Optional[int] - ) -> Dict[str, Any]: - request: Dict[str, Any] = {"id": store_id.hex()} + ) -> dict[str, Any]: + request: dict[str, Any] = {"id": store_id.hex()} if root_hash is not None: request["root_hash"] = root_hash.hex() if page is not None: @@ -76,8 +76,8 @@ async def get_keys_values( async def get_keys( self, store_id: bytes32, root_hash: Optional[bytes32], page: Optional[int], max_page_size: Optional[int] - ) -> Dict[str, Any]: - request: Dict[str, Any] = {"id": store_id.hex()} + ) -> dict[str, Any]: + request: dict[str, Any] = {"id": store_id.hex()} if root_hash is not None: request["root_hash"] = root_hash.hex() if page is not None: @@ -87,38 +87,38 @@ async def get_keys( response = await self.fetch("get_keys", request) return response - async def get_ancestors(self, store_id: bytes32, hash: bytes32) -> Dict[str, Any]: + async def get_ancestors(self, store_id: bytes32, hash: bytes32) -> dict[str, Any]: response = await self.fetch("get_ancestors", {"id": store_id.hex(), "hash": hash}) return response - async def get_root(self, store_id: bytes32) -> Dict[str, Any]: + async def get_root(self, store_id: bytes32) -> dict[str, Any]: response = await self.fetch("get_root", {"id": store_id.hex()}) return response - async def get_local_root(self, store_id: bytes32) -> Dict[str, Any]: + async def get_local_root(self, store_id: bytes32) -> dict[str, Any]: response = await self.fetch("get_local_root", {"id": store_id.hex()}) return response - async def get_roots(self, store_ids: List[bytes32]) -> Dict[str, Any]: + async def get_roots(self, store_ids: list[bytes32]) -> dict[str, Any]: response = await self.fetch("get_roots", {"ids": store_ids}) return response - async def subscribe(self, store_id: bytes32, urls: List[str]) -> Dict[str, Any]: + async def subscribe(self, store_id: bytes32, urls: list[str]) -> dict[str, Any]: response = await self.fetch("subscribe", {"id": store_id.hex(), "urls": urls}) return response - async def remove_subscriptions(self, store_id: bytes32, urls: List[str]) -> Dict[str, Any]: + async def remove_subscriptions(self, store_id: bytes32, urls: list[str]) -> dict[str, Any]: response = await self.fetch("remove_subscriptions", {"id": store_id.hex(), "urls": urls}) return response - async def unsubscribe(self, store_id: bytes32, retain: bool) -> Dict[str, Any]: + async def unsubscribe(self, store_id: bytes32, retain: bool) -> dict[str, Any]: response = await self.fetch("unsubscribe", {"id": store_id.hex(), "retain": retain}) return response async def add_missing_files( - self, store_ids: Optional[List[bytes32]], overwrite: Optional[bool], foldername: Optional[Path] - ) -> Dict[str, Any]: - request: Dict[str, Any] = {} + self, store_ids: Optional[list[bytes32]], overwrite: Optional[bool], foldername: Optional[Path] + ) -> dict[str, Any]: + request: dict[str, Any] = {} if store_ids is not None: request["ids"] = [store_id.hex() for store_id in store_ids] if overwrite is not None: @@ -130,8 +130,8 @@ async def add_missing_files( async def get_kv_diff( self, store_id: bytes32, hash_1: bytes32, hash_2: bytes32, page: Optional[int], max_page_size: Optional[int] - ) -> Dict[str, Any]: - request: Dict[str, Any] = {"id": 
store_id.hex(), "hash_1": hash_1.hex(), "hash_2": hash_2.hex()} + ) -> dict[str, Any]: + request: dict[str, Any] = {"id": store_id.hex(), "hash_1": hash_1.hex(), "hash_2": hash_2.hex()} if page is not None: request["page"] = page if max_page_size is not None: @@ -139,50 +139,50 @@ async def get_kv_diff( response = await self.fetch("get_kv_diff", request) return response - async def get_root_history(self, store_id: bytes32) -> Dict[str, Any]: + async def get_root_history(self, store_id: bytes32) -> dict[str, Any]: response = await self.fetch("get_root_history", {"id": store_id.hex()}) return response async def add_mirror( - self, store_id: bytes32, urls: List[str], amount: int, fee: Optional[uint64] - ) -> Dict[str, Any]: + self, store_id: bytes32, urls: list[str], amount: int, fee: Optional[uint64] + ) -> dict[str, Any]: response = await self.fetch("add_mirror", {"id": store_id.hex(), "urls": urls, "amount": amount, "fee": fee}) return response - async def delete_mirror(self, coin_id: bytes32, fee: Optional[uint64]) -> Dict[str, Any]: + async def delete_mirror(self, coin_id: bytes32, fee: Optional[uint64]) -> dict[str, Any]: response = await self.fetch("delete_mirror", {"coin_id": coin_id.hex(), "fee": fee}) return response - async def get_mirrors(self, store_id: bytes32) -> Dict[str, Any]: + async def get_mirrors(self, store_id: bytes32) -> dict[str, Any]: response = await self.fetch("get_mirrors", {"id": store_id.hex()}) return response - async def get_subscriptions(self) -> Dict[str, Any]: + async def get_subscriptions(self) -> dict[str, Any]: response = await self.fetch("subscriptions", {}) return response - async def get_owned_stores(self) -> Dict[str, Any]: + async def get_owned_stores(self) -> dict[str, Any]: response = await self.fetch("get_owned_stores", {}) return response - async def get_sync_status(self, store_id: bytes32) -> Dict[str, Any]: + async def get_sync_status(self, store_id: bytes32) -> dict[str, Any]: response = await self.fetch("get_sync_status", {"id": store_id.hex()}) return response - async def check_plugins(self) -> Dict[str, Any]: + async def check_plugins(self) -> dict[str, Any]: response = await self.fetch("check_plugins", {}) return response - async def clear_pending_roots(self, store_id: bytes32) -> Dict[str, Any]: + async def clear_pending_roots(self, store_id: bytes32) -> dict[str, Any]: request = ClearPendingRootsRequest(store_id=store_id) response = await self.fetch("clear_pending_roots", request.marshal()) return response - async def get_proof(self, store_id: bytes32, keys: List[bytes]) -> Dict[str, Any]: - request: Dict[str, Any] = {"store_id": store_id.hex(), "keys": [key.hex() for key in keys]} + async def get_proof(self, store_id: bytes32, keys: list[bytes]) -> dict[str, Any]: + request: dict[str, Any] = {"store_id": store_id.hex(), "keys": [key.hex() for key in keys]} response = await self.fetch("get_proof", request) return response - async def verify_proof(self, proof: Dict[str, Any]) -> Dict[str, Any]: + async def verify_proof(self, proof: dict[str, Any]) -> dict[str, Any]: response = await self.fetch("verify_proof", proof) return response diff --git a/chia/rpc/data_layer_rpc_util.py b/chia/rpc/data_layer_rpc_util.py index d1f7413c585f..315152222f40 100644 --- a/chia/rpc/data_layer_rpc_util.py +++ b/chia/rpc/data_layer_rpc_util.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Type, TypeVar +from typing import Any, TypeVar from typing_extensions import Protocol @@ -13,13 +13,13 @@ class 
MarshallableProtocol(Protocol): @classmethod - def unmarshal(cls: Type[_T], marshalled: Dict[str, Any]) -> _T: ... + def unmarshal(cls: type[_T], marshalled: dict[str, Any]) -> _T: ... - def marshal(self) -> Dict[str, Any]: ... + def marshal(self) -> dict[str, Any]: ... class UnboundRoute(Protocol): - async def __call__(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def __call__(self, request: dict[str, Any]) -> dict[str, Any]: pass @@ -42,9 +42,9 @@ def decorator(route: UnboundMarshalledRoute) -> UnboundRoute: from typing import get_type_hints hints = get_type_hints(route) - request_class: Type[MarshallableProtocol] = hints["request"] + request_class: type[MarshallableProtocol] = hints["request"] - async def wrapper(self: object, request: Dict[str, object]) -> Dict[str, object]: + async def wrapper(self: object, request: dict[str, object]) -> dict[str, object]: # import json # name = route.__name__ # print(f"\n ==== {name} request.json\n{json.dumps(request, indent=2)}") diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 2bd84fce95d0..8415a119781b 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -2,7 +2,7 @@ import dataclasses import operator -from typing import Any, Callable, ClassVar, Dict, List, Optional, Tuple +from typing import Any, Callable, ClassVar, Optional from typing_extensions import Protocol @@ -23,7 +23,7 @@ class PaginatedRequestData(Protocol): page: uint32 page_size: uint32 - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () @streamable @@ -39,11 +39,11 @@ class PlotInfoRequestData(Streamable): node_id: bytes32 page: uint32 page_size: uint32 - filter: List[FilterItem] = dataclasses.field(default_factory=list) + filter: list[FilterItem] = dataclasses.field(default_factory=list) sort_key: str = "filename" reverse: bool = False - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () @streamable @@ -52,13 +52,13 @@ class PlotPathRequestData(Streamable): node_id: bytes32 page: uint32 page_size: uint32 - filter: List[str] = dataclasses.field(default_factory=list) + filter: list[str] = dataclasses.field(default_factory=list) reverse: bool = False - __match_args__: ClassVar[Tuple[str, ...]] = () + __match_args__: ClassVar[tuple[str, ...]] = () -def paginated_plot_request(source: List[Any], request: PaginatedRequestData) -> Dict[str, object]: +def paginated_plot_request(source: list[Any], request: PaginatedRequestData) -> dict[str, object]: paginator: Paginator = Paginator(source, request.page_size) return { "node_id": request.node_id.hex(), @@ -82,7 +82,7 @@ def __init__(self, farmer: Farmer): self.service = farmer self.service_name = "chia_farmer" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { "/get_signage_point": self.get_signage_point, "/get_signage_points": self.get_signage_points, @@ -99,7 +99,7 @@ def get_routes(self) -> Dict[str, Endpoint]: "/get_pool_login_link": self.get_pool_login_link, } - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]]) -> list[WsRpcMessage]: payloads = [] if change_data is None: @@ -233,7 +233,7 @@ async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any] return payloads - async def get_signage_point(self, request: Dict[str, Any]) -> EndpointResult: + async def get_signage_point(self, 
request: dict[str, Any]) -> EndpointResult: sp_hash = bytes32.from_hexstr(request["sp_hash"]) sps = self.service.sps.get(sp_hash) if sps is None or len(sps) < 1: @@ -253,8 +253,8 @@ async def get_signage_point(self, request: Dict[str, Any]) -> EndpointResult: "proofs": pospaces, } - async def get_signage_points(self, _: Dict[str, Any]) -> EndpointResult: - result: List[Dict[str, Any]] = [] + async def get_signage_points(self, _: dict[str, Any]) -> EndpointResult: + result: list[dict[str, Any]] = [] for sps in self.service.sps.values(): for sp in sps: pospaces = self.service.proofs_of_space.get(sp.challenge_chain_sp, []) @@ -273,12 +273,12 @@ async def get_signage_points(self, _: Dict[str, Any]) -> EndpointResult: ) return {"signage_points": result} - async def get_reward_targets(self, request: Dict[str, Any]) -> EndpointResult: + async def get_reward_targets(self, request: dict[str, Any]) -> EndpointResult: search_for_private_key = request["search_for_private_key"] max_ph_to_search = request.get("max_ph_to_search", 500) return await self.service.get_reward_targets(search_for_private_key, max_ph_to_search) - async def set_reward_targets(self, request: Dict[str, Any]) -> EndpointResult: + async def set_reward_targets(self, request: dict[str, Any]) -> EndpointResult: farmer_target, pool_target = None, None if "farmer_target" in request: farmer_target = request["farmer_target"] @@ -296,7 +296,7 @@ def get_pool_contract_puzzle_hash_plot_count(self, pool_contract_puzzle_hash: by ) return plot_count - async def get_pool_state(self, request: Dict[str, Any]) -> EndpointResult: + async def get_pool_state(self, request: dict[str, Any]) -> EndpointResult: pools_list = [] for p2_singleton_puzzle_hash, pool_dict in self.service.pool_state.items(): pool_state = pool_dict.copy() @@ -304,18 +304,18 @@ async def get_pool_state(self, request: Dict[str, Any]) -> EndpointResult: pools_list.append(pool_state) return {"pool_state": pools_list} - async def set_payout_instructions(self, request: Dict[str, Any]) -> EndpointResult: + async def set_payout_instructions(self, request: dict[str, Any]) -> EndpointResult: launcher_id: bytes32 = bytes32.from_hexstr(request["launcher_id"]) await self.service.set_payout_instructions(launcher_id, request["payout_instructions"]) return {} - async def get_harvesters(self, request: Dict[str, Any]) -> EndpointResult: + async def get_harvesters(self, request: dict[str, Any]) -> EndpointResult: return await self.service.get_harvesters(False) - async def get_harvesters_summary(self, _: Dict[str, object]) -> EndpointResult: + async def get_harvesters_summary(self, _: dict[str, object]) -> EndpointResult: return await self.service.get_harvesters(True) - async def get_harvester_plots_valid(self, request_dict: Dict[str, object]) -> EndpointResult: + async def get_harvester_plots_valid(self, request_dict: dict[str, object]) -> EndpointResult: # TODO: Consider having a extra List[PlotInfo] in Receiver to avoid rebuilding the list for each call request = PlotInfoRequestData.from_json_dict(request_dict) plot_list = list(self.service.get_receiver(request.node_id).plots().values()) @@ -323,7 +323,7 @@ async def get_harvester_plots_valid(self, request_dict: Dict[str, object]) -> En plot_list = [ plot for plot in plot_list if all(plot_matches_filter(plot, filter_item) for filter_item in request.filter) ] - restricted_sort_keys: List[str] = ["pool_contract_puzzle_hash", "pool_public_key", "plot_public_key"] + restricted_sort_keys: list[str] = ["pool_contract_puzzle_hash", "pool_public_key", 
"plot_public_key"] # Apply sort_key and reverse if sort_key is not restricted if request.sort_key in restricted_sort_keys: raise KeyError(f"Can't sort by optional attributes: {restricted_sort_keys}") @@ -332,8 +332,8 @@ async def get_harvester_plots_valid(self, request_dict: Dict[str, object]) -> En return paginated_plot_request(plot_list, request) def paginated_plot_path_request( - self, source_func: Callable[[Receiver], List[str]], request_dict: Dict[str, object] - ) -> Dict[str, object]: + self, source_func: Callable[[Receiver], list[str]], request_dict: dict[str, object] + ) -> dict[str, object]: request: PlotPathRequestData = PlotPathRequestData.from_json_dict(request_dict) receiver = self.service.get_receiver(request.node_id) source = source_func(receiver) @@ -343,16 +343,16 @@ def paginated_plot_path_request( source = sorted(source, reverse=request.reverse) return paginated_plot_request(source, request) - async def get_harvester_plots_invalid(self, request_dict: Dict[str, object]) -> EndpointResult: + async def get_harvester_plots_invalid(self, request_dict: dict[str, object]) -> EndpointResult: return self.paginated_plot_path_request(Receiver.invalid, request_dict) - async def get_harvester_plots_keys_missing(self, request_dict: Dict[str, object]) -> EndpointResult: + async def get_harvester_plots_keys_missing(self, request_dict: dict[str, object]) -> EndpointResult: return self.paginated_plot_path_request(Receiver.keys_missing, request_dict) - async def get_harvester_plots_duplicates(self, request_dict: Dict[str, object]) -> EndpointResult: + async def get_harvester_plots_duplicates(self, request_dict: dict[str, object]) -> EndpointResult: return self.paginated_plot_path_request(Receiver.duplicates, request_dict) - async def get_pool_login_link(self, request: Dict[str, Any]) -> EndpointResult: + async def get_pool_login_link(self, request: dict[str, Any]) -> EndpointResult: launcher_id: bytes32 = bytes32.from_hexstr(request["launcher_id"]) login_link: Optional[str] = await self.service.generate_login_link(launcher_id) if login_link is None: diff --git a/chia/rpc/farmer_rpc_client.py b/chia/rpc/farmer_rpc_client.py index b5a0db6af004..8161a6031426 100644 --- a/chia/rpc/farmer_rpc_client.py +++ b/chia/rpc/farmer_rpc_client.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from chia.rpc.farmer_rpc_api import PlotInfoRequestData, PlotPathRequestData from chia.rpc.rpc_client import RpcClient @@ -17,16 +17,16 @@ class FarmerRpcClient(RpcClient): to the full node. 
""" - async def get_signage_point(self, sp_hash: bytes32) -> Optional[Dict[str, Any]]: + async def get_signage_point(self, sp_hash: bytes32) -> Optional[dict[str, Any]]: try: return await self.fetch("get_signage_point", {"sp_hash": sp_hash.hex()}) except ValueError: return None - async def get_signage_points(self) -> List[Dict[str, Any]]: - return cast(List[Dict[str, Any]], (await self.fetch("get_signage_points", {}))["signage_points"]) + async def get_signage_points(self) -> list[dict[str, Any]]: + return cast(list[dict[str, Any]], (await self.fetch("get_signage_points", {}))["signage_points"]) - async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> Dict[str, Any]: + async def get_reward_targets(self, search_for_private_key: bool, max_ph_to_search: int = 500) -> dict[str, Any]: response = await self.fetch( "get_reward_targets", {"search_for_private_key": search_for_private_key, "max_ph_to_search": max_ph_to_search}, @@ -45,7 +45,7 @@ async def set_reward_targets( self, farmer_target: Optional[str] = None, pool_target: Optional[str] = None, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = {} if farmer_target is not None: request["farmer_target"] = farmer_target @@ -53,29 +53,29 @@ async def set_reward_targets( request["pool_target"] = pool_target return await self.fetch("set_reward_targets", request) - async def get_pool_state(self) -> Dict[str, Any]: + async def get_pool_state(self) -> dict[str, Any]: return await self.fetch("get_pool_state", {}) - async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str) -> Dict[str, Any]: + async def set_payout_instructions(self, launcher_id: bytes32, payout_instructions: str) -> dict[str, Any]: request = {"launcher_id": launcher_id.hex(), "payout_instructions": payout_instructions} return await self.fetch("set_payout_instructions", request) - async def get_harvesters(self) -> Dict[str, Any]: + async def get_harvesters(self) -> dict[str, Any]: return await self.fetch("get_harvesters", {}) - async def get_harvesters_summary(self) -> Dict[str, Any]: + async def get_harvesters_summary(self) -> dict[str, Any]: return await self.fetch("get_harvesters_summary", {}) - async def get_harvester_plots_valid(self, request: PlotInfoRequestData) -> Dict[str, Any]: + async def get_harvester_plots_valid(self, request: PlotInfoRequestData) -> dict[str, Any]: return await self.fetch("get_harvester_plots_valid", recurse_jsonify(request)) - async def get_harvester_plots_invalid(self, request: PlotPathRequestData) -> Dict[str, Any]: + async def get_harvester_plots_invalid(self, request: PlotPathRequestData) -> dict[str, Any]: return await self.fetch("get_harvester_plots_invalid", recurse_jsonify(request)) - async def get_harvester_plots_keys_missing(self, request: PlotPathRequestData) -> Dict[str, Any]: + async def get_harvester_plots_keys_missing(self, request: PlotPathRequestData) -> dict[str, Any]: return await self.fetch("get_harvester_plots_keys_missing", recurse_jsonify(request)) - async def get_harvester_plots_duplicates(self, request: PlotPathRequestData) -> Dict[str, Any]: + async def get_harvester_plots_duplicates(self, request: PlotPathRequestData) -> dict[str, Any]: return await self.fetch("get_harvester_plots_duplicates", recurse_jsonify(request)) async def get_pool_login_link(self, launcher_id: bytes32) -> Optional[str]: diff --git a/chia/rpc/full_node_rpc_api.py b/chia/rpc/full_node_rpc_api.py index 976c4f28d628..8f413ad02bb1 100644 --- a/chia/rpc/full_node_rpc_api.py +++ 
b/chia/rpc/full_node_rpc_api.py @@ -1,7 +1,7 @@ from __future__ import annotations from datetime import datetime, timezone -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain import Blockchain, BlockchainMutexPriority @@ -33,7 +33,7 @@ from chia.util.ws_message import WsRpcMessage, create_payload_dict -def coin_record_dict_backwards_compat(coin_record: Dict[str, Any]) -> Dict[str, bool]: +def coin_record_dict_backwards_compat(coin_record: dict[str, Any]) -> dict[str, bool]: coin_record["spent"] = coin_record["spent_block_index"] > 0 return coin_record @@ -85,9 +85,9 @@ class FullNodeRpcApi: def __init__(self, service: FullNode) -> None: self.service = service self.service_name = "chia_full_node" - self.cached_blockchain_state: Optional[Dict[str, Any]] = None + self.cached_blockchain_state: Optional[dict[str, Any]] = None - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { # Blockchain "/get_blockchain_state": self.get_blockchain_state, @@ -122,7 +122,7 @@ def get_routes(self) -> Dict[str, Endpoint]: "/get_fee_estimate": self.get_fee_estimate, } - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> list[WsRpcMessage]: if change_data is None: change_data = {} @@ -155,7 +155,7 @@ async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any] return payloads - async def get_blockchain_state(self, _: Dict[str, Any]) -> EndpointResult: + async def get_blockchain_state(self, _: dict[str, Any]) -> EndpointResult: """ Returns a summary of the node's view of the blockchain. 
""" @@ -278,7 +278,7 @@ async def get_blockchain_state(self, _: Dict[str, Any]) -> EndpointResult: self.cached_blockchain_state = dict(response["blockchain_state"]) return response - async def get_recent_signage_point_or_eos(self, request: Dict[str, Any]) -> EndpointResult: + async def get_recent_signage_point_or_eos(self, request: dict[str, Any]) -> EndpointResult: if "sp_hash" not in request: challenge_hash: bytes32 = bytes32.from_hexstr(request["challenge_hash"]) # This is the case of getting an end of slot @@ -369,7 +369,7 @@ async def get_recent_signage_point_or_eos(self, request: Dict[str, Any]) -> Endp return {"signage_point": sp, "time_received": time_received, "reverted": True} - async def get_block(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: raise ValueError("No header_hash in request") header_hash = bytes32.from_hexstr(request["header_hash"]) @@ -380,7 +380,7 @@ async def get_block(self, request: Dict[str, Any]) -> EndpointResult: return {"block": block} - async def get_blocks(self, request: Dict[str, Any]) -> EndpointResult: + async def get_blocks(self, request: dict[str, Any]) -> EndpointResult: if "start" not in request: raise ValueError("No start in request") if "end" not in request: @@ -397,7 +397,7 @@ async def get_blocks(self, request: Dict[str, Any]) -> EndpointResult: block_range = [] for a in range(start, end): block_range.append(uint32(a)) - blocks: List[FullBlock] = await self.service.block_store.get_full_blocks_at(block_range) + blocks: list[FullBlock] = await self.service.block_store.get_full_blocks_at(block_range) json_blocks = [] for block in blocks: hh: bytes32 = block.header_hash @@ -410,7 +410,7 @@ async def get_blocks(self, request: Dict[str, Any]) -> EndpointResult: json_blocks.append(json) return {"blocks": json_blocks} - async def get_block_count_metrics(self, _: Dict[str, Any]) -> EndpointResult: + async def get_block_count_metrics(self, _: dict[str, Any]) -> EndpointResult: compact_blocks = 0 uncompact_blocks = 0 with log_exceptions(self.service.log, consume=True): @@ -430,7 +430,7 @@ async def get_block_count_metrics(self, _: Dict[str, Any]) -> EndpointResult: } } - async def get_block_records(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block_records(self, request: dict[str, Any]) -> EndpointResult: if "start" not in request: raise ValueError("No start in request") if "end" not in request: @@ -460,7 +460,7 @@ async def get_block_records(self, request: Dict[str, Any]) -> EndpointResult: records.append(record) return {"block_records": records} - async def get_block_spends(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block_spends(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: raise ValueError("No header_hash in request") header_hash = bytes32.from_hexstr(request["header_hash"]) @@ -468,7 +468,7 @@ async def get_block_spends(self, request: Dict[str, Any]) -> EndpointResult: if full_block is None: raise ValueError(f"Block {header_hash.hex()} not found") - spends: List[CoinSpend] = [] + spends: list[CoinSpend] = [] block_generator = await get_block_generator(self.service.blockchain.lookup_block_generators, full_block) if block_generator is None: # if block is not a transaction block. 
return {"block_spends": spends} @@ -477,7 +477,7 @@ async def get_block_spends(self, request: Dict[str, Any]) -> EndpointResult: return {"block_spends": spends} - async def get_block_spends_with_conditions(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block_spends_with_conditions(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: raise ValueError("No header_hash in request") header_hash = bytes32.from_hexstr(request["header_hash"]) @@ -506,7 +506,7 @@ async def get_block_spends_with_conditions(self, request: Dict[str, Any]) -> End ] } - async def get_block_record_by_height(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block_record_by_height(self, request: dict[str, Any]) -> EndpointResult: if "height" not in request: raise ValueError("No height in request") height = request["height"] @@ -525,7 +525,7 @@ async def get_block_record_by_height(self, request: Dict[str, Any]) -> EndpointR raise ValueError(f"Block {header_hash} does not exist") return {"block_record": record} - async def get_block_record(self, request: Dict[str, Any]) -> EndpointResult: + async def get_block_record(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: raise ValueError("header_hash not in request") header_hash_str = request["header_hash"] @@ -539,12 +539,12 @@ async def get_block_record(self, request: Dict[str, Any]) -> EndpointResult: return {"block_record": record} - async def get_unfinished_block_headers(self, _request: Dict[str, Any]) -> EndpointResult: + async def get_unfinished_block_headers(self, _request: dict[str, Any]) -> EndpointResult: peak: Optional[BlockRecord] = self.service.blockchain.get_peak() if peak is None: return {"headers": []} - response_headers: List[UnfinishedHeaderBlock] = [] + response_headers: list[UnfinishedHeaderBlock] = [] for block in self.service.full_node_store.get_unfinished_blocks(peak.height): unfinished_header_block = UnfinishedHeaderBlock( block.finished_sub_slots, @@ -558,7 +558,7 @@ async def get_unfinished_block_headers(self, _request: Dict[str, Any]) -> Endpoi response_headers.append(unfinished_header_block) return {"headers": response_headers} - async def get_network_space(self, request: Dict[str, Any]) -> EndpointResult: + async def get_network_space(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves an estimate of total space validating the chain between two block header hashes. @@ -599,13 +599,13 @@ async def get_network_space(self, request: Dict[str, Any]) -> EndpointResult: ) return {"space": uint128(int(network_space_bytes_estimate))} - async def get_coin_records_by_puzzle_hash(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_puzzle_hash(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves the coins for a given puzzlehash, by default returns unspent coins. 
""" if "puzzle_hash" not in request: raise ValueError("Puzzle hash not in request") - kwargs: Dict[str, Any] = {"include_spent_coins": False, "puzzle_hash": hexstr_to_bytes(request["puzzle_hash"])} + kwargs: dict[str, Any] = {"include_spent_coins": False, "puzzle_hash": hexstr_to_bytes(request["puzzle_hash"])} if "start_height" in request: kwargs["start_height"] = uint32(request["start_height"]) if "end_height" in request: @@ -618,13 +618,13 @@ async def get_coin_records_by_puzzle_hash(self, request: Dict[str, Any]) -> Endp return {"coin_records": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in coin_records]} - async def get_coin_records_by_puzzle_hashes(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_puzzle_hashes(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves the coins for a given puzzlehash, by default returns unspent coins. """ if "puzzle_hashes" not in request: raise ValueError("Puzzle hashes not in request") - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "include_spent_coins": False, "puzzle_hashes": [hexstr_to_bytes(ph) for ph in request["puzzle_hashes"]], } @@ -640,7 +640,7 @@ async def get_coin_records_by_puzzle_hashes(self, request: Dict[str, Any]) -> En return {"coin_records": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in coin_records]} - async def get_coin_record_by_name(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_record_by_name(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves a coin record by its name. """ @@ -654,13 +654,13 @@ async def get_coin_record_by_name(self, request: Dict[str, Any]) -> EndpointResu return {"coin_record": coin_record_dict_backwards_compat(coin_record.to_json_dict())} - async def get_coin_records_by_names(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_names(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves the coins for given coin IDs, by default returns unspent coins. """ if "names" not in request: raise ValueError("Names not in request") - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "include_spent_coins": False, "names": [hexstr_to_bytes(name) for name in request["names"]], } @@ -676,13 +676,13 @@ async def get_coin_records_by_names(self, request: Dict[str, Any]) -> EndpointRe return {"coin_records": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in coin_records]} - async def get_coin_records_by_parent_ids(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_parent_ids(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves the coins for given parent coin IDs, by default returns unspent coins. """ if "parent_ids" not in request: raise ValueError("Parent IDs not in request") - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "include_spent_coins": False, "parent_ids": [hexstr_to_bytes(ph) for ph in request["parent_ids"]], } @@ -698,7 +698,7 @@ async def get_coin_records_by_parent_ids(self, request: Dict[str, Any]) -> Endpo return {"coin_records": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in coin_records]} - async def get_coin_records_by_hint(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_hint(self, request: dict[str, Any]) -> EndpointResult: """ Retrieves coins by hint, by default returns unspent coins. 
""" @@ -708,9 +708,9 @@ async def get_coin_records_by_hint(self, request: Dict[str, Any]) -> EndpointRes if self.service.hint_store is None: return {"coin_records": []} - names: List[bytes32] = await self.service.hint_store.get_coin_ids(bytes32.from_hexstr(request["hint"])) + names: list[bytes32] = await self.service.hint_store.get_coin_ids(bytes32.from_hexstr(request["hint"])) - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "include_spent_coins": False, "names": names, } @@ -727,7 +727,7 @@ async def get_coin_records_by_hint(self, request: Dict[str, Any]) -> EndpointRes return {"coin_records": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in coin_records]} - async def push_tx(self, request: Dict[str, Any]) -> EndpointResult: + async def push_tx(self, request: dict[str, Any]) -> EndpointResult: if "spend_bundle" not in request: raise ValueError("Spend bundle not in request") @@ -752,7 +752,7 @@ async def push_tx(self, request: Dict[str, Any]) -> EndpointResult: "status": status.name, } - async def get_puzzle_and_solution(self, request: Dict[str, Any]) -> EndpointResult: + async def get_puzzle_and_solution(self, request: dict[str, Any]) -> EndpointResult: coin_name: bytes32 = bytes32.from_hexstr(request["coin_id"]) height = request["height"] coin_record = await self.service.coin_store.get_coin_record(coin_name) @@ -776,7 +776,7 @@ async def get_puzzle_and_solution(self, request: Dict[str, Any]) -> EndpointResu ) return {"coin_solution": CoinSpend(coin_record.coin, spend_info.puzzle, spend_info.solution)} - async def get_additions_and_removals(self, request: Dict[str, Any]) -> EndpointResult: + async def get_additions_and_removals(self, request: dict[str, Any]) -> EndpointResult: if "header_hash" not in request: raise ValueError("No header_hash in request") header_hash = bytes32.from_hexstr(request["header_hash"]) @@ -788,28 +788,28 @@ async def get_additions_and_removals(self, request: Dict[str, Any]) -> EndpointR async with self.service.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.low): if self.service.blockchain.height_to_hash(block.height) != header_hash: raise ValueError(f"Block at {header_hash.hex()} is no longer in the blockchain (it's in a fork)") - additions: List[CoinRecord] = await self.service.coin_store.get_coins_added_at_height(block.height) - removals: List[CoinRecord] = await self.service.coin_store.get_coins_removed_at_height(block.height) + additions: list[CoinRecord] = await self.service.coin_store.get_coins_added_at_height(block.height) + removals: list[CoinRecord] = await self.service.coin_store.get_coins_removed_at_height(block.height) return { "additions": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in additions], "removals": [coin_record_dict_backwards_compat(cr.to_json_dict()) for cr in removals], } - async def get_aggsig_additional_data(self, _: Dict[str, Any]) -> EndpointResult: + async def get_aggsig_additional_data(self, _: dict[str, Any]) -> EndpointResult: return {"additional_data": self.service.constants.AGG_SIG_ME_ADDITIONAL_DATA.hex()} - async def get_all_mempool_tx_ids(self, _: Dict[str, Any]) -> EndpointResult: + async def get_all_mempool_tx_ids(self, _: dict[str, Any]) -> EndpointResult: ids = list(self.service.mempool_manager.mempool.all_item_ids()) return {"tx_ids": ids} - async def get_all_mempool_items(self, _: Dict[str, Any]) -> EndpointResult: + async def get_all_mempool_items(self, _: dict[str, Any]) -> EndpointResult: spends = {} for item in 
self.service.mempool_manager.mempool.all_items(): spends[item.name.hex()] = item.to_json_dict() return {"mempool_items": spends} - async def get_mempool_item_by_tx_id(self, request: Dict[str, Any]) -> EndpointResult: + async def get_mempool_item_by_tx_id(self, request: dict[str, Any]) -> EndpointResult: if "tx_id" not in request: raise ValueError("No tx_id in request") include_pending: bool = request.get("include_pending", False) @@ -821,7 +821,7 @@ async def get_mempool_item_by_tx_id(self, request: Dict[str, Any]) -> EndpointRe return {"mempool_item": item.to_json_dict()} - async def get_mempool_items_by_coin_name(self, request: Dict[str, Any]) -> EndpointResult: + async def get_mempool_items_by_coin_name(self, request: dict[str, Any]) -> EndpointResult: if "coin_name" not in request: raise ValueError("No coin_name in request") @@ -849,7 +849,7 @@ def _get_spendbundle_type_cost(self, name: str) -> uint64: } return uint64(tx_cost_estimates[name]) - async def _validate_fee_estimate_cost(self, request: Dict[str, Any]) -> uint64: + async def _validate_fee_estimate_cost(self, request: dict[str, Any]) -> uint64: c = 0 ns = ["spend_bundle", "cost", "spend_type"] for n in ns: @@ -869,17 +869,17 @@ async def _validate_fee_estimate_cost(self, request: Dict[str, Any]) -> uint64: cost *= request.get("spend_count", 1) return uint64(cost) - def _validate_target_times(self, request: Dict[str, Any]) -> None: + def _validate_target_times(self, request: dict[str, Any]) -> None: if "target_times" not in request: raise ValueError("Request must contain 'target_times' array") if any(t < 0 for t in request["target_times"]): raise ValueError("'target_times' array members must be non-negative") - async def get_fee_estimate(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def get_fee_estimate(self, request: dict[str, Any]) -> dict[str, Any]: self._validate_target_times(request) spend_cost = await self._validate_fee_estimate_cost(request) - target_times: List[int] = request["target_times"] + target_times: list[int] = request["target_times"] estimator: FeeEstimatorInterface = self.service.mempool_manager.mempool.fee_estimator target_times.sort() estimates = [ diff --git a/chia/rpc/full_node_rpc_client.py b/chia/rpc/full_node_rpc_client.py index eba4e0f9e97e..05d3bbee38a6 100644 --- a/chia/rpc/full_node_rpc_client.py +++ b/chia/rpc/full_node_rpc_client.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, Tuple, cast +from typing import Any, Optional, cast from chia.consensus.block_record import BlockRecord from chia.full_node.signage_point import SignagePoint @@ -15,7 +15,7 @@ from chia.util.ints import uint32 -def coin_record_dict_backwards_compat(coin_record: Dict[str, Any]) -> Dict[str, Any]: +def coin_record_dict_backwards_compat(coin_record: dict[str, Any]) -> dict[str, Any]: del coin_record["spent"] return coin_record @@ -29,11 +29,11 @@ class FullNodeRpcClient(RpcClient): to the full node. 
""" - async def get_blockchain_state(self) -> Dict[str, Any]: + async def get_blockchain_state(self) -> dict[str, Any]: response = await self.fetch("get_blockchain_state", {}) if response["blockchain_state"]["peak"] is not None: response["blockchain_state"]["peak"] = BlockRecord.from_json_dict(response["blockchain_state"]["peak"]) - return cast(Dict[str, Any], response["blockchain_state"]) + return cast(dict[str, Any], response["blockchain_state"]) async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: try: @@ -42,7 +42,7 @@ async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: return None return FullBlock.from_json_dict(response["block"]) - async def get_blocks(self, start: int, end: int, exclude_reorged: bool = False) -> List[FullBlock]: + async def get_blocks(self, start: int, end: int, exclude_reorged: bool = False) -> list[FullBlock]: response = await self.fetch( "get_blocks", {"start": start, "end": end, "exclude_header_hash": True, "exclude_reorged": exclude_reorged} ) @@ -64,11 +64,11 @@ async def get_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]: return None return BlockRecord.from_json_dict(response["block_record"]) - async def get_unfinished_block_headers(self) -> List[UnfinishedHeaderBlock]: + async def get_unfinished_block_headers(self) -> list[UnfinishedHeaderBlock]: response = await self.fetch("get_unfinished_block_headers", {}) return [UnfinishedHeaderBlock.from_json_dict(r) for r in response["headers"]] - async def get_all_block(self, start: uint32, end: uint32) -> List[FullBlock]: + async def get_all_block(self, start: uint32, end: uint32) -> list[FullBlock]: response = await self.fetch("get_blocks", {"start": start, "end": end, "exclude_header_hash": True}) return [FullBlock.from_json_dict(r) for r in response["blocks"]] @@ -93,11 +93,11 @@ async def get_coin_record_by_name(self, coin_id: bytes32) -> Optional[CoinRecord async def get_coin_records_by_names( self, - names: List[bytes32], + names: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: names_hex = [name.hex() for name in names] d = {"names": names_hex, "include_spent_coins": include_spent_coins} if start_height is not None: @@ -114,7 +114,7 @@ async def get_coin_records_by_puzzle_hash( include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: d = {"puzzle_hash": puzzle_hash.hex(), "include_spent_coins": include_spent_coins} if start_height is not None: d["start_height"] = start_height @@ -126,11 +126,11 @@ async def get_coin_records_by_puzzle_hash( async def get_coin_records_by_puzzle_hashes( self, - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: puzzle_hashes_hex = [ph.hex() for ph in puzzle_hashes] d = {"puzzle_hashes": puzzle_hashes_hex, "include_spent_coins": include_spent_coins} if start_height is not None: @@ -143,11 +143,11 @@ async def get_coin_records_by_puzzle_hashes( async def get_coin_records_by_parent_ids( self, - parent_ids: List[bytes32], + parent_ids: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: parent_ids_hex = [pid.hex() for pid in parent_ids] 
d = {"parent_ids": parent_ids_hex, "include_spent_coins": include_spent_coins} if start_height is not None: @@ -168,7 +168,7 @@ async def get_coin_records_by_hint( include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: d = {"hint": hint.hex(), "include_spent_coins": include_spent_coins} if start_height is not None: d["start_height"] = start_height @@ -178,7 +178,7 @@ async def get_coin_records_by_hint( response = await self.fetch("get_coin_records_by_hint", d) return [CoinRecord.from_json_dict(coin_record_dict_backwards_compat(coin)) for coin in response["coin_records"]] - async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[CoinRecord], List[CoinRecord]]: + async def get_additions_and_removals(self, header_hash: bytes32) -> tuple[list[CoinRecord], list[CoinRecord]]: try: response = await self.fetch("get_additions_and_removals", {"header_hash": header_hash.hex()}) except Exception: @@ -191,7 +191,7 @@ async def get_additions_and_removals(self, header_hash: bytes32) -> Tuple[List[C additions.append(CoinRecord.from_json_dict(coin_record_dict_backwards_compat(coin_record))) return additions, removals - async def get_block_records(self, start: int, end: int) -> List[Dict[str, Any]]: + async def get_block_records(self, start: int, end: int) -> list[dict[str, Any]]: try: response = await self.fetch("get_block_records", {"start": start, "end": end}) if response["block_records"] is None: @@ -199,9 +199,9 @@ async def get_block_records(self, start: int, end: int) -> List[Dict[str, Any]]: except Exception: return [] # TODO: return block records - return cast(List[Dict[str, Any]], response["block_records"]) + return cast(list[dict[str, Any]], response["block_records"]) - async def get_block_spends(self, header_hash: bytes32) -> Optional[List[CoinSpend]]: + async def get_block_spends(self, header_hash: bytes32) -> Optional[list[CoinSpend]]: try: response = await self.fetch("get_block_spends", {"header_hash": header_hash.hex()}) block_spends = [] @@ -211,10 +211,10 @@ async def get_block_spends(self, header_hash: bytes32) -> Optional[List[CoinSpen except Exception: return None - async def get_block_spends_with_conditions(self, header_hash: bytes32) -> Optional[List[CoinSpendWithConditions]]: + async def get_block_spends_with_conditions(self, header_hash: bytes32) -> Optional[list[CoinSpendWithConditions]]: try: response = await self.fetch("get_block_spends_with_conditions", {"header_hash": header_hash.hex()}) - block_spends: List[CoinSpendWithConditions] = [] + block_spends: list[CoinSpendWithConditions] = [] for block_spend in response["block_spends_with_conditions"]: block_spends.append(CoinSpendWithConditions.from_json_dict(block_spend)) return block_spends @@ -222,7 +222,7 @@ async def get_block_spends_with_conditions(self, header_hash: bytes32) -> Option except Exception: return None - async def push_tx(self, spend_bundle: SpendBundle) -> Dict[str, Any]: + async def push_tx(self, spend_bundle: SpendBundle) -> dict[str, Any]: return await self.fetch("push_tx", {"spend_bundle": spend_bundle.to_json_dict()}) async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Optional[CoinSpend]: @@ -232,13 +232,13 @@ async def get_puzzle_and_solution(self, coin_id: bytes32, height: uint32) -> Opt except Exception: return None - async def get_all_mempool_tx_ids(self) -> List[bytes32]: + async def get_all_mempool_tx_ids(self) -> list[bytes32]: response = await 
self.fetch("get_all_mempool_tx_ids", {}) return [bytes32.from_hexstr(tx_id_hex) for tx_id_hex in response["tx_ids"]] - async def get_all_mempool_items(self) -> Dict[bytes32, Dict[str, Any]]: + async def get_all_mempool_items(self) -> dict[bytes32, dict[str, Any]]: response = await self.fetch("get_all_mempool_items", {}) - converted: Dict[bytes32, Dict[str, Any]] = {} + converted: dict[bytes32, dict[str, Any]] = {} for tx_id_hex, item in response["mempool_items"].items(): converted[bytes32.from_hexstr(tx_id_hex)] = item return converted @@ -247,16 +247,16 @@ async def get_mempool_item_by_tx_id( self, tx_id: bytes32, include_pending: bool = False, - ) -> Optional[Dict[str, Any]]: + ) -> Optional[dict[str, Any]]: try: response = await self.fetch( "get_mempool_item_by_tx_id", {"tx_id": tx_id.hex(), "include_pending": include_pending} ) - return cast(Dict[str, Any], response["mempool_item"]) + return cast(dict[str, Any], response["mempool_item"]) except Exception: return None - async def get_mempool_items_by_coin_name(self, coin_name: bytes32) -> Dict[str, Any]: + async def get_mempool_items_by_coin_name(self, coin_name: bytes32) -> dict[str, Any]: response = await self.fetch("get_mempool_items_by_coin_name", {"coin_name": coin_name.hex()}) return response @@ -285,8 +285,8 @@ async def get_recent_signage_point_or_eos( async def get_fee_estimate( self, - target_times: Optional[List[int]], + target_times: Optional[list[int]], cost: Optional[int], - ) -> Dict[str, Any]: + ) -> dict[str, Any]: response = await self.fetch("get_fee_estimate", {"cost": cost, "target_times": target_times}) return response diff --git a/chia/rpc/harvester_rpc_api.py b/chia/rpc/harvester_rpc_api.py index 0cacad4653b9..8da6cfe12795 100644 --- a/chia/rpc/harvester_rpc_api.py +++ b/chia/rpc/harvester_rpc_api.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.harvester.harvester import Harvester from chia.rpc.rpc_server import Endpoint, EndpointResult @@ -13,7 +13,7 @@ def __init__(self, harvester: Harvester): self.service = harvester self.service_name = "chia_harvester" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { "/get_plots": self.get_plots, "/refresh_plots": self.refresh_plots, @@ -25,7 +25,7 @@ def get_routes(self) -> Dict[str, Endpoint]: "/update_harvester_config": self.update_harvester_config, } - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> list[WsRpcMessage]: if change_data is None: change_data = {} @@ -48,7 +48,7 @@ async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any] return payloads - async def get_plots(self, _: Dict[str, Any]) -> EndpointResult: + async def get_plots(self, _: dict[str, Any]) -> EndpointResult: plots, failed_to_open, not_found = self.service.get_plots() return { "plots": plots, @@ -56,33 +56,33 @@ async def get_plots(self, _: Dict[str, Any]) -> EndpointResult: "not_found_filenames": not_found, } - async def refresh_plots(self, _: Dict[str, Any]) -> EndpointResult: + async def refresh_plots(self, _: dict[str, Any]) -> EndpointResult: self.service.plot_manager.trigger_refresh() return {} - async def delete_plot(self, request: Dict[str, Any]) -> EndpointResult: + async def delete_plot(self, request: dict[str, Any]) -> EndpointResult: filename = request["filename"] 
if self.service.delete_plot(filename): return {} raise ValueError(f"Not able to delete file {filename}") - async def add_plot_directory(self, request: Dict[str, Any]) -> EndpointResult: + async def add_plot_directory(self, request: dict[str, Any]) -> EndpointResult: directory_name = request["dirname"] if await self.service.add_plot_directory(directory_name): return {} raise ValueError(f"Did not add plot directory {directory_name}") - async def get_plot_directories(self, _: Dict[str, Any]) -> EndpointResult: + async def get_plot_directories(self, _: dict[str, Any]) -> EndpointResult: plot_dirs = await self.service.get_plot_directories() return {"directories": plot_dirs} - async def remove_plot_directory(self, request: Dict[str, Any]) -> EndpointResult: + async def remove_plot_directory(self, request: dict[str, Any]) -> EndpointResult: directory_name = request["dirname"] if await self.service.remove_plot_directory(directory_name): return {} raise ValueError(f"Did not remove plot directory {directory_name}") - async def get_harvester_config(self, _: Dict[str, Any]) -> EndpointResult: + async def get_harvester_config(self, _: dict[str, Any]) -> EndpointResult: harvester_config = await self.service.get_harvester_config() return { "use_gpu_harvesting": harvester_config["use_gpu_harvesting"], @@ -95,7 +95,7 @@ async def get_harvester_config(self, _: Dict[str, Any]) -> EndpointResult: "refresh_parameter_interval_seconds": harvester_config["plots_refresh_parameter"].get("interval_seconds"), } - async def update_harvester_config(self, request: Dict[str, Any]) -> EndpointResult: + async def update_harvester_config(self, request: dict[str, Any]) -> EndpointResult: use_gpu_harvesting: Optional[bool] = None gpu_index: Optional[int] = None enforce_gpu_index: Optional[bool] = None diff --git a/chia/rpc/harvester_rpc_client.py b/chia/rpc/harvester_rpc_client.py index 9741787cec18..5831d8641032 100644 --- a/chia/rpc/harvester_rpc_client.py +++ b/chia/rpc/harvester_rpc_client.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, cast +from typing import Any, cast from chia.rpc.rpc_client import RpcClient @@ -14,7 +14,7 @@ class HarvesterRpcClient(RpcClient): to the full node. 
""" - async def get_plots(self) -> Dict[str, Any]: + async def get_plots(self) -> dict[str, Any]: return await self.fetch("get_plots", {}) async def refresh_plots(self) -> None: @@ -32,10 +32,10 @@ async def add_plot_directory(self, dirname: str) -> bool: result = cast(bool, response["success"]) return result - async def get_plot_directories(self) -> List[str]: + async def get_plot_directories(self) -> list[str]: response = await self.fetch("get_plot_directories", {}) # TODO: casting due to lack of type checked deserialization - result = cast(List[str], response["directories"]) + result = cast(list[str], response["directories"]) return result async def remove_plot_directory(self, dirname: str) -> bool: @@ -44,10 +44,10 @@ async def remove_plot_directory(self, dirname: str) -> bool: result = cast(bool, response["success"]) return result - async def get_harvester_config(self) -> Dict[str, Any]: + async def get_harvester_config(self) -> dict[str, Any]: return await self.fetch("get_harvester_config", {}) - async def update_harvester_config(self, config: Dict[str, Any]) -> bool: + async def update_harvester_config(self, config: dict[str, Any]) -> bool: response = await self.fetch("update_harvester_config", config) # TODO: casting due to lack of type checked deserialization result = cast(bool, response["success"]) diff --git a/chia/rpc/rpc_client.py b/chia/rpc/rpc_client.py index 23b8cb10a7d0..56882dcc722a 100644 --- a/chia/rpc/rpc_client.py +++ b/chia/rpc/rpc_client.py @@ -2,11 +2,12 @@ import asyncio import json +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import dataclass from pathlib import Path from ssl import SSLContext -from typing import Any, AsyncIterator, Dict, List, Optional, Type, TypeVar +from typing import Any, Optional, TypeVar import aiohttp @@ -24,7 +25,7 @@ # the possibility to identify these errors in new code from having to review and # clean up existing code. 
class ResponseFailureError(ValueError): - def __init__(self, response: Dict[str, Any]): + def __init__(self, response: dict[str, Any]): self.response = response super().__init__(f"RPC response failure: {json.dumps(response)}") @@ -48,11 +49,11 @@ class RpcClient: @classmethod async def create( - cls: Type[_T_RpcClient], + cls: type[_T_RpcClient], self_hostname: str, port: uint16, root_path: Optional[Path], - net_config: Optional[Dict[str, Any]], + net_config: Optional[dict[str, Any]], ) -> _T_RpcClient: if (root_path is not None) != (net_config is not None): raise ValueError("Either both or neither of root_path and net_config must be provided") @@ -87,11 +88,11 @@ async def create( @classmethod @asynccontextmanager async def create_as_context( - cls: Type[_T_RpcClient], + cls: type[_T_RpcClient], self_hostname: str, port: uint16, root_path: Optional[Path] = None, - net_config: Optional[Dict[str, Any]] = None, + net_config: Optional[dict[str, Any]] = None, ) -> AsyncIterator[_T_RpcClient]: self = await cls.create( self_hostname=self_hostname, @@ -105,7 +106,7 @@ async def create_as_context( self.close() await self.await_closed() - async def fetch(self, path, request_json) -> Dict[str, Any]: + async def fetch(self, path, request_json) -> dict[str, Any]: async with self.session.post( self.url + path, json=request_json, ssl=self.ssl_context if self.ssl_context is not None else True ) as response: @@ -115,7 +116,7 @@ async def fetch(self, path, request_json) -> Dict[str, Any]: raise ResponseFailureError(res_json) return res_json - async def get_connections(self, node_type: Optional[NodeType] = None) -> List[Dict]: + async def get_connections(self, node_type: Optional[NodeType] = None) -> list[dict]: request = {} if node_type is not None: request["node_type"] = node_type.value @@ -124,16 +125,16 @@ async def get_connections(self, node_type: Optional[NodeType] = None) -> List[Di connection["node_id"] = hexstr_to_bytes(connection["node_id"]) return response["connections"] - async def open_connection(self, host: str, port: int) -> Dict: + async def open_connection(self, host: str, port: int) -> dict: return await self.fetch("open_connection", {"host": host, "port": int(port)}) - async def close_connection(self, node_id: bytes32) -> Dict: + async def close_connection(self, node_id: bytes32) -> dict: return await self.fetch("close_connection", {"node_id": node_id.hex()}) - async def stop_node(self) -> Dict: + async def stop_node(self) -> dict: return await self.fetch("stop_node", {}) - async def healthz(self) -> Dict: + async def healthz(self) -> dict: return await self.fetch("healthz", {}) def close(self) -> None: diff --git a/chia/rpc/rpc_server.py b/chia/rpc/rpc_server.py index 16a42a0ebea9..38b9c7c18236 100644 --- a/chia/rpc/rpc_server.py +++ b/chia/rpc/rpc_server.py @@ -5,10 +5,11 @@ import json import logging import traceback +from collections.abc import AsyncIterator, Awaitable from dataclasses import dataclass from pathlib import Path from ssl import SSLContext -from typing import Any, AsyncIterator, Awaitable, Callable, Dict, Generic, List, Optional, TypeVar +from typing import Any, Callable, Generic, Optional, TypeVar from aiohttp import ClientConnectorError, ClientSession, ClientWebSocketResponse, WSMsgType, web from typing_extensions import Protocol, final @@ -30,13 +31,13 @@ max_message_size = 50 * 1024 * 1024 # 50MB -EndpointResult = Dict[str, Any] -Endpoint = Callable[[Dict[str, object]], Awaitable[EndpointResult]] +EndpointResult = dict[str, Any] +Endpoint = Callable[[dict[str, 
object]], Awaitable[EndpointResult]] _T_RpcApiProtocol = TypeVar("_T_RpcApiProtocol", bound="RpcApiProtocol") class StateChangedProtocol(Protocol): - def __call__(self, change: str, change_data: Optional[Dict[str, Any]]) -> None: ... + def __call__(self, change: str, change_data: Optional[dict[str, Any]]) -> None: ... class RpcServiceProtocol(Protocol): @@ -54,7 +55,7 @@ def server(self) -> ChiaServer: # Optional[ChiaServer] ... - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: """Report the active connections for the service. A default implementation is available and can be called as @@ -90,16 +91,16 @@ def service(self) -> RpcServiceProtocol: # using a read-only property per https://github.com/python/mypy/issues/12990 ... - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: """Return the mapping of endpoints to handler callables.""" ... - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]]) -> list[WsRpcMessage]: """Notify the state change system of a changed state.""" ... -def default_get_connections(server: ChiaServer, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: +def default_get_connections(server: ChiaServer, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: connections = server.get_connections(request_node_type) con_info = [ { @@ -131,7 +132,7 @@ class RpcServer(Generic[_T_RpcApiProtocol]): service_name: str ssl_context: SSLContext ssl_client_context: SSLContext - net_config: Dict[str, Any] + net_config: dict[str, Any] webserver: Optional[WebServer] = None daemon_heartbeat: int = 300 daemon_connection_task: Optional[asyncio.Task[None]] = None @@ -147,7 +148,7 @@ def create( service_name: str, stop_cb: Callable[[], None], root_path: Path, - net_config: Dict[str, Any], + net_config: dict[str, Any], prefer_ipv6: bool, ) -> RpcServer[_T_RpcApiProtocol]: crt_path = root_path / net_config["daemon_ssl"]["private_crt"] @@ -196,10 +197,10 @@ async def await_closed(self) -> None: await self.daemon_connection_task self.daemon_connection_task = None - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> None: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]]) -> None: if self.websocket is None or self.websocket.closed: return None - payloads: List[WsRpcMessage] = await self.rpc_api._state_changed(change, change_data) + payloads: list[WsRpcMessage] = await self.rpc_api._state_changed(change, change_data) if change == "add_connection" or change == "close_connection" or change == "peer_changed_peak": data = await self.get_connections({}) @@ -222,7 +223,7 @@ async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any] tb = traceback.format_exc() log.warning(f"Sending data failed. 
Exception {tb}.") - def state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None: + def state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None: if self.websocket is None or self.websocket.closed: return None asyncio.create_task(self._state_changed(change, change_data)) @@ -233,7 +234,7 @@ def listen_port(self) -> uint16: raise RuntimeError("RpcServer is not started") return self.webserver.listen_port - def _get_routes(self) -> Dict[str, Endpoint]: + def _get_routes(self) -> dict[str, Endpoint]: return { **self.rpc_api.get_routes(), "/get_network_info": self.get_network_info, @@ -246,29 +247,29 @@ def _get_routes(self) -> Dict[str, Endpoint]: "/healthz": self.healthz, } - async def get_routes(self, request: Dict[str, Any]) -> EndpointResult: + async def get_routes(self, request: dict[str, Any]) -> EndpointResult: return { "success": True, "routes": list(self._get_routes().keys()), } - async def get_network_info(self, _: Dict[str, Any]) -> EndpointResult: + async def get_network_info(self, _: dict[str, Any]) -> EndpointResult: network_name = self.net_config["selected_network"] address_prefix = self.net_config["network_overrides"]["config"][network_name]["address_prefix"] genesis_challenge = self.net_config["network_overrides"]["constants"][network_name]["GENESIS_CHALLENGE"] return {"network_name": network_name, "network_prefix": address_prefix, "genesis_challenge": genesis_challenge} - async def get_connections(self, request: Dict[str, Any]) -> EndpointResult: + async def get_connections(self, request: dict[str, Any]) -> EndpointResult: request_node_type: Optional[NodeType] = None if "node_type" in request: request_node_type = NodeType(request["node_type"]) if self.rpc_api.service.server is None: raise ValueError("Global connections is not set") - con_info: List[Dict[str, Any]] + con_info: list[dict[str, Any]] con_info = self.rpc_api.service.get_connections(request_node_type=request_node_type) return {"connections": con_info} - async def open_connection(self, request: Dict[str, Any]) -> EndpointResult: + async def open_connection(self, request: dict[str, Any]) -> EndpointResult: host = request["host"] port = request["port"] target_node: PeerInfo = PeerInfo(await resolve(host, prefer_ipv6=self.prefer_ipv6), uint16(int(port))) @@ -279,7 +280,7 @@ async def open_connection(self, request: Dict[str, Any]) -> EndpointResult: return {"success": False, "error": f"could not connect to {target_node}"} return {"success": True} - async def close_connection(self, request: Dict[str, Any]) -> EndpointResult: + async def close_connection(self, request: dict[str, Any]) -> EndpointResult: node_id = hexstr_to_bytes(request["node_id"]) if self.rpc_api.service.server is None: raise web.HTTPInternalServerError() @@ -290,7 +291,7 @@ async def close_connection(self, request: Dict[str, Any]) -> EndpointResult: await connection.close() return {} - async def stop_node(self, request: Dict[str, Any]) -> EndpointResult: + async def stop_node(self, request: dict[str, Any]) -> EndpointResult: """ Shuts down the node. 
""" @@ -298,17 +299,17 @@ async def stop_node(self, request: Dict[str, Any]) -> EndpointResult: self.stop_cb() return {} - async def healthz(self, request: Dict[str, Any]) -> EndpointResult: + async def healthz(self, request: dict[str, Any]) -> EndpointResult: return { "success": True, } - async def get_version(self, request: Dict[str, Any]) -> EndpointResult: + async def get_version(self, request: dict[str, Any]) -> EndpointResult: return { "version": __version__, } - async def ws_api(self, message: WsRpcMessage) -> Optional[Dict[str, object]]: + async def ws_api(self, message: WsRpcMessage) -> Optional[dict[str, object]]: """ This function gets called when new message is received via websocket. """ @@ -317,7 +318,7 @@ async def ws_api(self, message: WsRpcMessage) -> Optional[Dict[str, object]]: if message["ack"]: return None - data: Dict[str, object] = {} + data: dict[str, object] = {} if "data" in message: data = message["data"] if command == "ping": @@ -417,7 +418,7 @@ async def start_rpc_server( rpc_port: uint16, stop_cb: Callable[[], None], root_path: Path, - net_config: Dict[str, object], + net_config: dict[str, object], connect_to_daemon: bool = True, max_request_body_size: Optional[int] = None, ) -> RpcServer[_T_RpcApiProtocol]: diff --git a/chia/rpc/timelord_rpc_api.py b/chia/rpc/timelord_rpc_api.py index 20ad63681731..62b99ed1e696 100644 --- a/chia/rpc/timelord_rpc_api.py +++ b/chia/rpc/timelord_rpc_api.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.rpc.rpc_server import Endpoint from chia.timelord.timelord import Timelord @@ -12,10 +12,10 @@ def __init__(self, timelord: Timelord): self.service = timelord self.service_name = "chia_timelord" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return {} - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> list[WsRpcMessage]: payloads = [] if change_data is None: diff --git a/chia/rpc/util.py b/chia/rpc/util.py index d68360941179..3d0918b0fc99 100644 --- a/chia/rpc/util.py +++ b/chia/rpc/util.py @@ -3,7 +3,8 @@ import dataclasses import logging import traceback -from typing import TYPE_CHECKING, Any, Awaitable, Callable, Dict, List, Optional, Tuple, get_type_hints +from collections.abc import Awaitable +from typing import TYPE_CHECKING, Any, Callable, Optional, get_type_hints import aiohttp from chia_rs import AugSchemeMPL @@ -31,11 +32,11 @@ # TODO: consolidate this with chia.rpc.rpc_server.Endpoint # Not all endpoints only take a dictionary so that definition is imperfect # This definition is weaker than that one however because the arguments can be anything -RpcEndpoint = Callable[..., Awaitable[Dict[str, Any]]] +RpcEndpoint = Callable[..., Awaitable[dict[str, Any]]] MarshallableRpcEndpoint = Callable[..., Awaitable[Streamable]] -ALL_TRANSLATION_LAYERS: Dict[str, TranslationLayer] = {"CHIP-0028": BLIND_SIGNER_TRANSLATION} +ALL_TRANSLATION_LAYERS: dict[str, TranslationLayer] = {"CHIP-0028": BLIND_SIGNER_TRANSLATION} def marshal(func: MarshallableRpcEndpoint) -> RpcEndpoint: @@ -44,7 +45,7 @@ def marshal(func: MarshallableRpcEndpoint) -> RpcEndpoint: assert issubclass(request_hint, Streamable) request_class = request_hint - async def rpc_endpoint(self, request: Dict[str, Any], *args: object, **kwargs: object) -> Dict[str, Any]: + async def 
rpc_endpoint(self, request: dict[str, Any], *args: object, **kwargs: object) -> dict[str, Any]: response_obj: Streamable = await func( self, ( @@ -104,7 +105,7 @@ def tx_endpoint( merge_spends: bool = True, ) -> Callable[[RpcEndpoint], RpcEndpoint]: def _inner(func: RpcEndpoint) -> RpcEndpoint: - async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[str, Any]: + async def rpc_endpoint(self, request: dict[str, Any], *args, **kwargs) -> dict[str, Any]: if TYPE_CHECKING: from chia.rpc.wallet_rpc_api import WalletRpcApi @@ -122,7 +123,7 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[s excluded_coin_amounts=request.get("exclude_coin_amounts"), ) if tx_config_loader.excluded_coin_ids is None: - excluded_coins: Optional[List[Dict[str, Any]]] = request.get( + excluded_coins: Optional[list[dict[str, Any]]] = request.get( "exclude_coins", request.get("excluded_coins") ) if excluded_coins is not None: @@ -136,7 +137,7 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[s logged_in_fingerprint=self.service.logged_in_fingerprint, ) - extra_conditions: Tuple[Condition, ...] = tuple() + extra_conditions: tuple[Condition, ...] = tuple() if "extra_conditions" in request: extra_conditions = tuple(conditions_from_json_dicts(request["extra_conditions"])) extra_conditions = (*extra_conditions, *ConditionValidTimes.from_json_dict(request).to_conditions()) @@ -156,7 +157,7 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[s merge_spends=request.get("merge_spends", merge_spends), sign=request.get("sign", self.service.config.get("auto_sign_txs", True)), ) as action_scope: - response: Dict[str, Any] = await func( + response: dict[str, Any] = await func( self, request, *args, @@ -242,13 +243,13 @@ async def rpc_endpoint(self, request: Dict[str, Any], *args, **kwargs) -> Dict[s response["tx_id"] = new_txs[0].name if "trade_record" in response: old_offer: Offer = Offer.from_bech32(response["offer"]) - signed_coin_spends: List[CoinSpend] = [ + signed_coin_spends: list[CoinSpend] = [ coin_spend for tx in new_txs if tx.spend_bundle is not None for coin_spend in tx.spend_bundle.coin_spends ] - involved_coins: List[Coin] = [spend.coin for spend in signed_coin_spends] + involved_coins: list[Coin] = [spend.coin for spend in signed_coin_spends] signed_coin_spends.extend( [spend for spend in old_offer._bundle.coin_spends if spend.coin not in involved_coins] ) diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 35a9f2c89625..387475823741 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -4,7 +4,7 @@ import sys from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar +from typing import Any, Optional, TypeVar from chia_rs import G1Element, G2Element, PrivateKey from typing_extensions import dataclass_transform @@ -34,7 +34,7 @@ @dataclass_transform(frozen_default=True, kw_only_default=True) -def kw_only_dataclass(cls: Type[Any]) -> Type[Any]: +def kw_only_dataclass(cls: type[Any]) -> type[Any]: if sys.version_info < (3, 10): return dataclass(frozen=True)(cls) # pragma: no cover else: @@ -73,10 +73,10 @@ class GetLoggedInFingerprintResponse(Streamable): @dataclass(frozen=True) class GetPublicKeysResponse(Streamable): keyring_is_locked: bool - public_key_fingerprints: Optional[List[uint32]] = None + public_key_fingerprints: Optional[list[uint32]] = None @property - def 
pk_fingerprints(self) -> List[uint32]: + def pk_fingerprints(self) -> list[uint32]: if self.keyring_is_locked: raise RuntimeError("get_public_keys cannot return public keys because the keyring is locked") else: @@ -111,13 +111,13 @@ class GetPrivateKeyResponse(Streamable): @streamable @dataclass(frozen=True) class GenerateMnemonicResponse(Streamable): - mnemonic: List[str] + mnemonic: list[str] @streamable @dataclass(frozen=True) class AddKey(Streamable): - mnemonic: List[str] + mnemonic: list[str] @streamable @@ -189,7 +189,7 @@ class GetTimestampForHeightResponse(Streamable): @streamable @dataclass(frozen=True) class GetNotifications(Streamable): - ids: Optional[List[bytes32]] = None + ids: Optional[list[bytes32]] = None start: Optional[uint32] = None end: Optional[uint32] = None @@ -197,7 +197,7 @@ class GetNotifications(Streamable): @streamable @dataclass(frozen=True) class GetNotificationsResponse(Streamable): - notifications: List[Notification] + notifications: list[Notification] @streamable @@ -228,17 +228,17 @@ class GetTransactionMemo(Streamable): @dataclass(frozen=True) class CoinIDWithMemos(Streamable): coin_id: bytes32 - memos: List[bytes] + memos: list[bytes] @streamable @dataclass(frozen=True) class GetTransactionMemoResponse(Streamable): transaction_id: bytes32 - coins_with_memos: List[CoinIDWithMemos] + coins_with_memos: list[CoinIDWithMemos] # TODO: deprecate the kinda silly format of this RPC and delete these functions - def to_json_dict(self) -> Dict[str, Any]: + def to_json_dict(self) -> dict[str, Any]: return { self.transaction_id.hex(): { cwm.coin_id.hex(): [memo.hex() for memo in cwm.memos] for cwm in self.coins_with_memos @@ -246,7 +246,7 @@ def to_json_dict(self) -> Dict[str, Any]: } @classmethod - def from_json_dict(cls, json_dict: Dict[str, Any]) -> GetTransactionMemoResponse: + def from_json_dict(cls, json_dict: dict[str, Any]) -> GetTransactionMemoResponse: return cls( bytes32.from_hexstr(list(json_dict.keys())[0]), [ @@ -275,7 +275,7 @@ class DefaultCAT(Streamable): @streamable @dataclass(frozen=True) class GetCATListResponse(Streamable): - cat_list: List[DefaultCAT] + cat_list: list[DefaultCAT] @streamable @@ -304,7 +304,7 @@ class DIDGetRecoveryInfoResponse(Streamable): coin_name: bytes32 newpuzhash: bytes32 pubkey: G1Element - backup_dids: List[bytes32] + backup_dids: list[bytes32] @streamable @@ -355,7 +355,7 @@ class NFTWalletWithDID(Streamable): @streamable @dataclass(frozen=True) class NFTGetWalletsWithDIDsResponse(Streamable): - nft_wallets: List[NFTWalletWithDID] + nft_wallets: list[NFTWalletWithDID] # utility for NFTSetDIDBulk @@ -369,7 +369,7 @@ class NFTCoin(Streamable): @streamable @dataclass(frozen=True) class GatherSigningInfo(Streamable): - spends: List[Spend] + spends: list[Spend] @streamable @@ -381,26 +381,26 @@ class GatherSigningInfoResponse(Streamable): @streamable @dataclass(frozen=True) class ApplySignatures(Streamable): - spends: List[Spend] - signing_responses: List[SigningResponse] + spends: list[Spend] + signing_responses: list[SigningResponse] @streamable @dataclass(frozen=True) class ApplySignaturesResponse(Streamable): - signed_transactions: List[SignedTransaction] + signed_transactions: list[SignedTransaction] @streamable @dataclass(frozen=True) class SubmitTransactions(Streamable): - signed_transactions: List[SignedTransaction] + signed_transactions: list[SignedTransaction] @streamable @dataclass(frozen=True) class SubmitTransactionsResponse(Streamable): - mempool_ids: List[bytes32] + mempool_ids: list[bytes32] @streamable @@ 
-413,7 +413,7 @@ class ExecuteSigningInstructions(Streamable): @streamable @dataclass(frozen=True) class ExecuteSigningInstructionsResponse(Streamable): - signing_responses: List[SigningResponse] + signing_responses: list[SigningResponse] # When inheriting from this class you must set any non default arguments with: @@ -426,7 +426,7 @@ class TransactionEndpointRequest(Streamable): push: Optional[bool] = None sign: Optional[bool] = None - def to_json_dict(self, _avoid_ban: bool = False) -> Dict[str, Any]: + def to_json_dict(self, _avoid_ban: bool = False) -> dict[str, Any]: if not _avoid_ban: raise NotImplementedError( "to_json_dict is banned on TransactionEndpointRequest, please use .json_serialize_for_transport" @@ -435,8 +435,8 @@ def to_json_dict(self, _avoid_ban: bool = False) -> Dict[str, Any]: return super().to_json_dict() def json_serialize_for_transport( - self, tx_config: TXConfig, extra_conditions: Tuple[Condition, ...], timelock_info: ConditionValidTimes - ) -> Dict[str, Any]: + self, tx_config: TXConfig, extra_conditions: tuple[Condition, ...], timelock_info: ConditionValidTimes + ) -> dict[str, Any]: return { **tx_config.to_json_dict(), **timelock_info.to_json_dict(), @@ -448,20 +448,20 @@ def json_serialize_for_transport( @streamable @dataclass(frozen=True) class TransactionEndpointResponse(Streamable): - unsigned_transactions: List[UnsignedTransaction] - transactions: List[TransactionRecord] + unsigned_transactions: list[UnsignedTransaction] + transactions: list[TransactionRecord] @streamable @dataclass(frozen=True) class PushTransactions(TransactionEndpointRequest): - transactions: List[TransactionRecord] = field(default_factory=default_raise) + transactions: list[TransactionRecord] = field(default_factory=default_raise) push: Optional[bool] = True # We allow for flexibility in transaction parsing here so we need to override @classmethod - def from_json_dict(cls, json_dict: Dict[str, Any]) -> PushTransactions: - transactions: List[TransactionRecord] = [] + def from_json_dict(cls, json_dict: dict[str, Any]) -> PushTransactions: + transactions: list[TransactionRecord] = [] for transaction_hexstr_or_json in json_dict["transactions"]: if isinstance(transaction_hexstr_or_json, str): tx = TransactionRecord.from_bytes(hexstr_to_bytes(transaction_hexstr_or_json)) @@ -503,7 +503,7 @@ class CombineCoins(TransactionEndpointRequest): wallet_id: uint32 = field(default_factory=default_raise) number_of_coins: uint16 = uint16(500) largest_first: bool = False - target_coin_ids: List[bytes32] = field(default_factory=list) + target_coin_ids: list[bytes32] = field(default_factory=list) target_coin_amount: Optional[uint64] = None coin_num_limit: uint16 = uint16(500) @@ -517,14 +517,14 @@ class CombineCoinsResponse(TransactionEndpointResponse): @streamable @kw_only_dataclass class NFTSetDIDBulk(TransactionEndpointRequest): - nft_coin_list: List[NFTCoin] = field(default_factory=default_raise) + nft_coin_list: list[NFTCoin] = field(default_factory=default_raise) did_id: Optional[str] = None @streamable @dataclass(frozen=True) class NFTSetDIDBulkResponse(TransactionEndpointResponse): - wallet_id: List[uint32] + wallet_id: list[uint32] tx_num: uint16 spend_bundle: WalletSpendBundle @@ -532,14 +532,14 @@ class NFTSetDIDBulkResponse(TransactionEndpointResponse): @streamable @kw_only_dataclass class NFTTransferBulk(TransactionEndpointRequest): - nft_coin_list: List[NFTCoin] = field(default_factory=default_raise) + nft_coin_list: list[NFTCoin] = field(default_factory=default_raise) target_address: str = 
field(default_factory=default_raise) @streamable @dataclass(frozen=True) class NFTTransferBulkResponse(TransactionEndpointResponse): - wallet_id: List[uint32] + wallet_id: list[uint32] tx_num: uint16 spend_bundle: WalletSpendBundle @@ -563,7 +563,7 @@ class SendTransactionMultiResponse(TransactionEndpointResponse): @streamable @dataclass(frozen=True) class CreateSignedTransactionsResponse(TransactionEndpointResponse): - signed_txs: List[TransactionRecord] + signed_txs: list[TransactionRecord] signed_tx: TransactionRecord @@ -607,7 +607,7 @@ class _OfferEndpointResponse(TransactionEndpointResponse): trade_record: TradeRecord @classmethod - def from_json_dict(cls: Type[_T_OfferEndpointResponse], json_dict: Dict[str, Any]) -> _T_OfferEndpointResponse: + def from_json_dict(cls: type[_T_OfferEndpointResponse], json_dict: dict[str, Any]) -> _T_OfferEndpointResponse: tx_endpoint: TransactionEndpointResponse = json_deserialize_with_clvm_streamable( json_dict, TransactionEndpointResponse ) @@ -677,7 +677,7 @@ class NFTSetNFTDIDResponse(TransactionEndpointResponse): @dataclass(frozen=True) class NFTMintBulkResponse(TransactionEndpointResponse): spend_bundle: WalletSpendBundle - nft_id_list: List[str] + nft_id_list: list[str] @streamable @@ -730,7 +730,7 @@ class DAOAddFundsToTreasuryResponse(TransactionEndpointResponse): @dataclass(frozen=True) class DAOSendToLockupResponse(TransactionEndpointResponse): tx_id: bytes32 - txs: List[TransactionRecord] + txs: list[TransactionRecord] @streamable diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 68b9c9f0be52..56f32200f0c1 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -5,7 +5,7 @@ import logging import zlib from pathlib import Path -from typing import Any, ClassVar, Dict, List, Optional, Set, Tuple, Union +from typing import Any, ClassVar, Optional, Union from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey from clvm_tools.binutils import assemble @@ -173,7 +173,7 @@ def __init__(self, wallet_node: WalletNode): self.service = wallet_node self.service_name = "chia_wallet" - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: return { # Key management "/log_in": self.log_in, @@ -328,10 +328,10 @@ def get_routes(self) -> Dict[str, Endpoint]: "/execute_signing_instructions": self.execute_signing_instructions, } - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.service.server, request_node_type=request_node_type) - async def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]]) -> List[WsRpcMessage]: + async def _state_changed(self, change: str, change_data: Optional[dict[str, Any]]) -> list[WsRpcMessage]: """ Called by the WalletNode or WalletStateManager when something has changed in the wallet. This gives us an opportunity to send notifications to all connected clients via WebSocket. 
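A minimal, self-contained sketch of the notification fan-out that docstring describes; everything in it (the function names, the "coin_added" change string, the payload shape) is an illustrative stand-in, not chia API, for the real RpcServer/WsRpcMessage plumbing shown in rpc_server.py above:

    import asyncio
    from typing import Any, Optional


    async def _state_changed(change: str, change_data: Optional[dict[str, Any]]) -> list[dict[str, Any]]:
        # The real method collects WsRpcMessage payloads from the RpcApi;
        # plain dicts stand in for them here.
        return [{"command": "state_changed", "data": {"state": change, **(change_data or {})}}]


    async def main() -> None:
        for payload in await _state_changed("coin_added", {"wallet_id": 1}):
            print(payload)  # the real server forwards these over the daemon websocket


    asyncio.run(main())

In the real server this work is scheduled from the synchronous state_changed entry point via asyncio.create_task and each payload is written to the open daemon websocket, as the rpc_server.py hunks above show.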
@@ -364,8 +364,8 @@ async def _convert_tx_puzzle_hash(self, tx: TransactionRecord) -> TransactionRec async def get_latest_singleton_coin_spend( self, peer: WSChiaConnection, coin_id: bytes32, latest: bool = True - ) -> Tuple[CoinSpend, CoinState]: - coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( + ) -> tuple[CoinSpend, CoinState]: + coin_state_list: list[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( [coin_id], peer=peer ) if coin_state_list is None or len(coin_state_list) < 1: @@ -387,7 +387,7 @@ async def get_latest_singleton_coin_spend( raise ValueError("Cannot find child coin, please wait then retry.") coin_state = odd_coin # Get parent coin - parent_coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( + parent_coin_state_list: list[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( [coin_state.coin.parent_coin_info], peer=peer ) if parent_coin_state_list is None or len(parent_coin_state_list) < 1: @@ -437,7 +437,7 @@ async def get_public_keys(self, request: Empty) -> GetPublicKeysResponse: else: return GetPublicKeysResponse(keyring_is_locked=False, public_key_fingerprints=fingerprints) - async def _get_private_key(self, fingerprint: int) -> Tuple[Optional[PrivateKey], Optional[bytes]]: + async def _get_private_key(self, fingerprint: int) -> tuple[Optional[PrivateKey], Optional[bytes]]: try: all_keys = await self.service.keychain_proxy.get_all_private_keys() for sk, seed in all_keys: @@ -509,7 +509,7 @@ async def delete_key(self, request: DeleteKey) -> Empty: async def _check_key_used_for_rewards( self, new_root: Path, sk: PrivateKey, max_ph_to_search: int - ) -> Tuple[bool, bool]: + ) -> tuple[bool, bool]: """Checks if the given key is used for either the farmer rewards or pool rewards returns a tuple of two booleans The first is true if the key is used as the Farmer rewards, otherwise false @@ -519,10 +519,10 @@ async def _check_key_used_for_rewards( if sk is None: return False, False - config: Dict[str, Any] = load_config(new_root, "config.yaml") + config: dict[str, Any] = load_config(new_root, "config.yaml") farmer_target = config["farmer"].get("xch_target_address", "") pool_target = config["pool"].get("xch_target_address", "") - address_to_check: List[bytes32] = [] + address_to_check: list[bytes32] = [] try: farmer_decoded = decode_puzzle_hash(farmer_target) @@ -536,7 +536,7 @@ async def _check_key_used_for_rewards( except ValueError: pool_decoded = None - found_addresses: Set[bytes32] = match_address_to_sk(sk, address_to_check, max_ph_to_search) + found_addresses: set[bytes32] = match_address_to_sk(sk, address_to_check, max_ph_to_search) found_farmer = False found_pool = False @@ -568,7 +568,7 @@ async def check_delete_key(self, request: CheckDeleteKey) -> CheckDeleteKeyRespo await self._stop_wallet() await self.service._start_with_fingerprint(fingerprint=request.fingerprint) - wallets: List[WalletInfo] = await self.service.wallet_state_manager.get_all_wallet_info_entries() + wallets: list[WalletInfo] = await self.service.wallet_state_manager.get_all_wallet_info_entries() for w in wallets: wallet = self.service.wallet_state_manager.wallets[w.id] unspent = await self.service.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(w.id) @@ -644,7 +644,7 @@ async def push_transactions( self, request: PushTransactions, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] 
= tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> PushTransactionsResponse: if not action_scope.config.push: raise ValueError("Cannot push transactions if push is False") @@ -724,15 +724,15 @@ async def get_auto_claim(self, request: Empty) -> AutoClaimSettings: # Wallet Management ########################################################################################## - async def get_wallets(self, request: Dict[str, Any]) -> EndpointResult: + async def get_wallets(self, request: dict[str, Any]) -> EndpointResult: include_data: bool = request.get("include_data", True) wallet_type: Optional[WalletType] = None if "type" in request: wallet_type = WalletType(request["type"]) - wallets: List[WalletInfo] = await self.service.wallet_state_manager.get_all_wallet_info_entries(wallet_type) + wallets: list[WalletInfo] = await self.service.wallet_state_manager.get_all_wallet_info_entries(wallet_type) if not include_data: - result: List[WalletInfo] = [] + result: list[WalletInfo] = [] for wallet in wallets: result.append(WalletInfo(wallet.id, wallet.name, wallet.type, "")) wallets = result @@ -761,9 +761,9 @@ async def get_wallets(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def create_new_wallet( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_state_manager = self.service.wallet_state_manager @@ -821,7 +821,7 @@ async def create_new_wallet( backup_dids.append(decode_puzzle_hash(d)) if len(backup_dids) > 0: num_needed = uint64(request["num_of_backup_ids_needed"]) - metadata: Dict[str, str] = {} + metadata: dict[str, str] = {} if "metadata" in request: if type(request["metadata"]) is dict: metadata = request["metadata"] @@ -1029,7 +1029,7 @@ async def create_new_wallet( # Wallet ########################################################################################## - async def _get_wallet_balance(self, wallet_id: uint32) -> Dict[str, Any]: + async def _get_wallet_balance(self, wallet_id: uint32) -> dict[str, Any]: wallet = self.service.wallet_state_manager.wallets[wallet_id] balance = await self.service.get_balance(wallet_id) wallet_balance = balance.to_json_dict() @@ -1046,22 +1046,22 @@ async def _get_wallet_balance(self, wallet_id: uint32) -> Dict[str, Any]: return wallet_balance - async def get_wallet_balance(self, request: Dict[str, Any]) -> EndpointResult: + async def get_wallet_balance(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(int(request["wallet_id"])) wallet_balance = await self._get_wallet_balance(wallet_id) return {"wallet_balance": wallet_balance} - async def get_wallet_balances(self, request: Dict[str, Any]) -> EndpointResult: + async def get_wallet_balances(self, request: dict[str, Any]) -> EndpointResult: try: - wallet_ids: List[uint32] = [uint32(int(wallet_id)) for wallet_id in request["wallet_ids"]] + wallet_ids: list[uint32] = [uint32(int(wallet_id)) for wallet_id in request["wallet_ids"]] except (TypeError, KeyError): wallet_ids = list(self.service.wallet_state_manager.wallets.keys()) - wallet_balances: Dict[uint32, Dict[str, Any]] = {} + wallet_balances: dict[uint32, dict[str, Any]] = {} for wallet_id in wallet_ids: wallet_balances[wallet_id] = await self._get_wallet_balance(wallet_id) return {"wallet_balances": wallet_balances} - async def get_transaction(self, request: Dict[str, Any]) -> EndpointResult: + async def 
get_transaction(self, request: dict[str, Any]) -> EndpointResult: transaction_id: bytes32 = bytes32.from_hexstr(request["transaction_id"]) tr: Optional[TransactionRecord] = await self.service.wallet_state_manager.get_transaction(transaction_id) if tr is None: @@ -1072,7 +1072,7 @@ async def get_transaction(self, request: Dict[str, Any]) -> EndpointResult: "transaction_id": tr.name, } - async def get_transaction_memo(self, request: Dict[str, Any]) -> EndpointResult: + async def get_transaction_memo(self, request: dict[str, Any]) -> EndpointResult: transaction_id: bytes32 = bytes32.from_hexstr(request["transaction_id"]) tr: Optional[TransactionRecord] = await self.service.wallet_state_manager.get_transaction(transaction_id) if tr is None: @@ -1082,7 +1082,7 @@ async def get_transaction_memo(self, request: Dict[str, Any]) -> EndpointResult: # Fetch incoming tx coin spend peer = self.service.get_full_node_peer() assert len(tr.additions) == 1 - coin_state_list: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( + coin_state_list: list[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( [tr.additions[0].parent_coin_info], peer=peer ) assert len(coin_state_list) == 1 @@ -1091,7 +1091,7 @@ async def get_transaction_memo(self, request: Dict[str, Any]) -> EndpointResult: else: raise ValueError(f"Transaction 0x{transaction_id.hex()} doesn't have any coin spend.") assert tr.spend_bundle is not None - memos: Dict[bytes32, List[bytes]] = compute_memos(tr.spend_bundle) + memos: dict[bytes32, list[bytes]] = compute_memos(tr.spend_bundle) response = {} # Convert to hex string for coin_id, memo_list in memos.items(): @@ -1101,7 +1101,7 @@ async def get_transaction_memo(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=False) @marshal async def split_coins( - self, request: SplitCoins, action_scope: WalletActionScope, extra_conditions: Tuple[Condition, ...] = tuple() + self, request: SplitCoins, action_scope: WalletActionScope, extra_conditions: tuple[Condition, ...] = tuple() ) -> SplitCoinsResponse: if request.number_of_coins > 500: raise ValueError(f"{request.number_of_coins} coins is greater then the maximum limit of 500 coins.") @@ -1170,7 +1170,7 @@ async def split_coins( @tx_endpoint(push=False) @marshal async def combine_coins( - self, request: CombineCoins, action_scope: WalletActionScope, extra_conditions: Tuple[Condition, ...] = tuple() + self, request: CombineCoins, action_scope: WalletActionScope, extra_conditions: tuple[Condition, ...] 
= tuple() ) -> CombineCoinsResponse: # Some "number of coins" validation @@ -1189,7 +1189,7 @@ async def combine_coins( if not isinstance(wallet, (Wallet, CATWallet)): raise ValueError("Cannot combine coins from non-fungible wallet types") - coins: List[Coin] = [] + coins: list[Coin] = [] # First get the coin IDs specified if request.target_coin_ids != []: @@ -1271,7 +1271,7 @@ async def combine_coins( return CombineCoinsResponse([], []) # tx_endpoint will take care to fill this out - async def get_transactions(self, request: Dict[str, Any]) -> EndpointResult: + async def get_transactions(self, request: dict[str, Any]) -> EndpointResult: wallet_id = int(request["wallet_id"]) start = request.get("start", 0) @@ -1323,7 +1323,7 @@ async def get_transactions(self, request: Dict[str, Any]) -> EndpointResult: "wallet_id": wallet_id, } - async def get_transaction_count(self, request: Dict[str, Any]) -> EndpointResult: + async def get_transaction_count(self, request: dict[str, Any]) -> EndpointResult: wallet_id = int(request["wallet_id"]) type_filter = None if "type_filter" in request: @@ -1336,7 +1336,7 @@ async def get_transaction_count(self, request: Dict[str, Any]) -> EndpointResult "wallet_id": wallet_id, } - async def get_next_address(self, request: Dict[str, Any]) -> EndpointResult: + async def get_next_address(self, request: dict[str, Any]) -> EndpointResult: """ Returns a new address """ @@ -1367,9 +1367,9 @@ async def get_next_address(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def send_transaction( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: raise ValueError("Wallet needs to be fully synced before sending transactions") @@ -1387,7 +1387,7 @@ async def send_transaction( raise ValueError("Unexpected Address Prefix") puzzle_hash: bytes32 = decode_puzzle_hash(address) - memos: List[bytes] = [] + memos: list[bytes] = [] if "memos" in request: memos = [mem.encode("utf-8") for mem in request["memos"]] @@ -1411,7 +1411,7 @@ async def send_transaction( "transaction_id": None, # tx_endpoint wrapper will take care of this } - async def send_transaction_multi(self, request: Dict[str, Any]) -> EndpointResult: + async def send_transaction_multi(self, request: dict[str, Any]) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: raise ValueError("Wallet needs to be fully synced before sending transactions") @@ -1444,9 +1444,9 @@ async def send_transaction_multi(self, request: Dict[str, Any]) -> EndpointResul @tx_endpoint(push=True, merge_spends=False) async def spend_clawback_coins( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """Spend clawback coins that were sent (to claw them back) or received (to claim them). 
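For reference, a hypothetical request body for this endpoint, assembled only from the fields the handler reads in the hunks that follow ("coin_ids" is required; "fee" and "batch_size" fall back to defaults); the coin ID hex is a placeholder:

    from typing import Any

    # Hypothetical spend_clawback_coins request; the coin ID is a placeholder.
    example_request: dict[str, Any] = {
        "coin_ids": ["ab" * 32],  # required: 32-byte coin IDs, hex-encoded
        "fee": 1000,              # optional: mojos to pay; defaults to 0
        "batch_size": 50,         # optional: defaults to the auto_claim setting (50)
    }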
@@ -1457,7 +1457,7 @@ async def spend_clawback_coins( """ if "coin_ids" not in request: raise ValueError("Coin IDs are required.") - coin_ids: List[bytes32] = [bytes32.from_hexstr(coin) for coin in request["coin_ids"]] + coin_ids: list[bytes32] = [bytes32.from_hexstr(coin) for coin in request["coin_ids"]] tx_fee: uint64 = uint64(request.get("fee", 0)) # Get inner puzzle coin_records = await self.service.wallet_state_manager.coin_store.get_coin_records( @@ -1467,7 +1467,7 @@ async def spend_clawback_coins( spent_range=UInt32Range(stop=uint32(0)), ) - coins: Dict[Coin, ClawbackMetadata] = {} + coins: dict[Coin, ClawbackMetadata] = {} batch_size = request.get( "batch_size", self.service.wallet_state_manager.config.get("auto_claim", {}).get("batch_size", 50) ) @@ -1514,7 +1514,7 @@ async def spend_clawback_coins( "transactions": None, # tx_endpoint wrapper will take care of this } - async def delete_unconfirmed_transactions(self, request: Dict[str, Any]) -> EndpointResult: + async def delete_unconfirmed_transactions(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) if wallet_id not in self.service.wallet_state_manager.wallets: raise ValueError(f"Wallet id {wallet_id} does not exist") @@ -1531,14 +1531,14 @@ async def delete_unconfirmed_transactions(self, request: Dict[str, Any]) -> Endp async def select_coins( self, - request: Dict[str, Any], + request: dict[str, Any], ) -> EndpointResult: assert self.service.logged_in_fingerprint is not None tx_config_loader: TXConfigLoader = TXConfigLoader.from_json_dict(request) # Some backwards compat fill-ins if tx_config_loader.excluded_coin_ids is None: - excluded_coins: Optional[List[Dict[str, Any]]] = request.get("excluded_coins", request.get("exclude_coins")) + excluded_coins: Optional[list[dict[str, Any]]] = request.get("excluded_coins", request.get("exclude_coins")) if excluded_coins is not None: tx_config_loader = tx_config_loader.override( excluded_coin_ids=[Coin.from_json_dict(c).name() for c in excluded_coins], @@ -1560,7 +1560,7 @@ async def select_coins( return {"coins": [coin.to_json_dict() for coin in selected_coins]} - async def get_spendable_coins(self, request: Dict[str, Any]) -> EndpointResult: + async def get_spendable_coins(self, request: dict[str, Any]) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: raise ValueError("Wallet needs to be fully synced before getting all coins") @@ -1569,17 +1569,17 @@ async def get_spendable_coins(self, request: Dict[str, Any]) -> EndpointResult: max_coin_amount: uint64 = uint64(request.get("max_coin_amount", 0)) if max_coin_amount == 0: max_coin_amount = uint64(self.service.wallet_state_manager.constants.MAX_COIN_AMOUNT) - excluded_coin_amounts: Optional[List[uint64]] = request.get("excluded_coin_amounts") + excluded_coin_amounts: Optional[list[uint64]] = request.get("excluded_coin_amounts") if excluded_coin_amounts is not None: excluded_coin_amounts = [uint64(a) for a in excluded_coin_amounts] else: excluded_coin_amounts = [] - excluded_coins_input: Optional[Dict[str, Dict[str, Any]]] = request.get("excluded_coins") + excluded_coins_input: Optional[dict[str, dict[str, Any]]] = request.get("excluded_coins") if excluded_coins_input is not None: excluded_coins = [Coin.from_json_dict(json_coin) for json_coin in excluded_coins_input] else: excluded_coins = [] - excluded_coin_ids_input: Optional[List[str]] = request.get("excluded_coin_ids") + excluded_coin_ids_input: Optional[list[str]] = request.get("excluded_coin_ids") if 
excluded_coin_ids_input is not None: excluded_coin_ids = [bytes32.from_hexstr(hex_id) for hex_id in excluded_coin_ids_input] else: @@ -1590,27 +1590,27 @@ async def get_spendable_coins(self, request: Dict[str, Any]) -> EndpointResult: all_coin_records = await state_mgr.coin_store.get_unspent_coins_for_wallet(wallet_id) if wallet.type() in {WalletType.CAT, WalletType.CRCAT}: assert isinstance(wallet, CATWallet) - spendable_coins: List[WalletCoinRecord] = await wallet.get_cat_spendable_coins(all_coin_records) + spendable_coins: list[WalletCoinRecord] = await wallet.get_cat_spendable_coins(all_coin_records) else: spendable_coins = list(await state_mgr.get_spendable_coins_for_wallet(wallet_id, all_coin_records)) # Now we get the unconfirmed transactions and manually derive the additions and removals. - unconfirmed_transactions: List[TransactionRecord] = await state_mgr.tx_store.get_unconfirmed_for_wallet( + unconfirmed_transactions: list[TransactionRecord] = await state_mgr.tx_store.get_unconfirmed_for_wallet( wallet_id ) - unconfirmed_removal_ids: Dict[bytes32, uint64] = { + unconfirmed_removal_ids: dict[bytes32, uint64] = { coin.name(): transaction.created_at_time for transaction in unconfirmed_transactions for coin in transaction.removals } - unconfirmed_additions: List[Coin] = [ + unconfirmed_additions: list[Coin] = [ coin for transaction in unconfirmed_transactions for coin in transaction.additions if await state_mgr.does_coin_belong_to_wallet(coin, wallet_id) ] - valid_spendable_cr: List[CoinRecord] = [] - unconfirmed_removals: List[CoinRecord] = [] + valid_spendable_cr: list[CoinRecord] = [] + unconfirmed_removals: list[CoinRecord] = [] for coin_record in all_coin_records: if coin_record.name() in unconfirmed_removal_ids: unconfirmed_removals.append(coin_record.to_coin_record(unconfirmed_removal_ids[coin_record.name()])) @@ -1635,14 +1635,14 @@ async def get_spendable_coins(self, request: Dict[str, Any]) -> EndpointResult: "unconfirmed_additions": [coin.to_json_dict() for coin in unconfirmed_additions], } - async def get_coin_records_by_names(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records_by_names(self, request: dict[str, Any]) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: raise ValueError("Wallet needs to be fully synced before finding coin information") if "names" not in request: raise ValueError("Names not in request") coin_ids = [bytes32.from_hexstr(name) for name in request["names"]] - kwargs: Dict[str, Any] = { + kwargs: dict[str, Any] = { "coin_id_filter": HashFilter.include(coin_ids), } @@ -1658,10 +1658,10 @@ async def get_coin_records_by_names(self, request: Dict[str, Any]) -> EndpointRe kwargs["spent_range"] = unspent_range async with self.service.wallet_state_manager.lock: - coin_records: List[CoinRecord] = await self.service.wallet_state_manager.get_coin_records_by_coin_ids( + coin_records: list[CoinRecord] = await self.service.wallet_state_manager.get_coin_records_by_coin_ids( **kwargs ) - missed_coins: List[str] = [ + missed_coins: list[str] = [ "0x" + c_id.hex() for c_id in coin_ids if c_id not in [cr.name for cr in coin_records] ] if missed_coins: @@ -1669,14 +1669,14 @@ async def get_coin_records_by_names(self, request: Dict[str, Any]) -> EndpointRe return {"coin_records": [cr.to_json_dict() for cr in coin_records]} - async def get_current_derivation_index(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def get_current_derivation_index(self, request: dict[str, Any]) -> dict[str, Any]: assert 
self.service.wallet_state_manager is not None index: Optional[uint32] = await self.service.wallet_state_manager.puzzle_store.get_last_derivation_path() return {"success": True, "index": index} - async def extend_derivation_index(self, request: Dict[str, Any]) -> Dict[str, Any]: + async def extend_derivation_index(self, request: dict[str, Any]) -> dict[str, Any]: assert self.service.wallet_state_manager is not None # Require a new max derivation index @@ -1720,7 +1720,7 @@ async def extend_derivation_index(self, request: Dict[str, Any]) -> Dict[str, An @marshal async def get_notifications(self, request: GetNotifications) -> GetNotificationsResponse: if request.ids is None: - notifications: List[Notification] = ( + notifications: list[Notification] = ( await self.service.wallet_state_manager.notification_manager.notification_store.get_all_notifications( pagination=(request.start, request.end) ) @@ -1734,8 +1734,8 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications return GetNotificationsResponse(notifications) - async def delete_notifications(self, request: Dict[str, Any]) -> EndpointResult: - ids: Optional[List[str]] = request.get("ids", None) + async def delete_notifications(self, request: dict[str, Any]) -> EndpointResult: + ids: Optional[list[str]] = request.get("ids", None) if ids is None: await self.service.wallet_state_manager.notification_manager.notification_store.delete_all_notifications() else: @@ -1748,9 +1748,9 @@ async def delete_notifications(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def send_notification( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: await self.service.wallet_state_manager.notification_manager.send_new_notification( bytes32.from_hexstr(request["target"]), @@ -1763,7 +1763,7 @@ async def send_notification( return {"tx": None, "transactions": None} # tx_endpoint wrapper will take care of this - async def verify_signature(self, request: Dict[str, Any]) -> EndpointResult: + async def verify_signature(self, request: dict[str, Any]) -> EndpointResult: """ Given a public key, message and signature, verify if it is valid. :param request: @@ -1821,7 +1821,7 @@ async def verify_signature(self, request: Dict[str, Any]) -> EndpointResult: else: return {"isValid": False, "error": "Signature is invalid."} - async def sign_message_by_address(self, request: Dict[str, Any]) -> EndpointResult: + async def sign_message_by_address(self, request: dict[str, Any]) -> EndpointResult: """ Given a derived P2 address, sign the message by its private key. :param request: @@ -1851,7 +1851,7 @@ async def sign_message_by_address(self, request: Dict[str, Any]) -> EndpointResu "signing_mode": mode.value, } - async def sign_message_by_id(self, request: Dict[str, Any]) -> EndpointResult: + async def sign_message_by_id(self, request: dict[str, Any]) -> EndpointResult: """ Given a NFT/DID ID, sign the message by the P2 private key. 
:param request: @@ -1916,22 +1916,22 @@ async def sign_message_by_id(self, request: Dict[str, Any]) -> EndpointResult: # CATs and Trading ########################################################################################## - async def get_cat_list(self, request: Dict[str, Any]) -> EndpointResult: + async def get_cat_list(self, request: dict[str, Any]) -> EndpointResult: return {"cat_list": list(DEFAULT_CATS.values())} - async def cat_set_name(self, request: Dict[str, Any]) -> EndpointResult: + async def cat_set_name(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=CATWallet) await wallet.set_name(str(request["name"])) return {"wallet_id": wallet_id} - async def cat_get_name(self, request: Dict[str, Any]) -> EndpointResult: + async def cat_get_name(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=CATWallet) name: str = wallet.get_name() return {"wallet_id": wallet_id, "name": name} - async def get_stray_cats(self, request: Dict[str, Any]) -> EndpointResult: + async def get_stray_cats(self, request: dict[str, Any]) -> EndpointResult: """ Get a list of all unacknowledged CATs :param request: RPC request @@ -1943,9 +1943,9 @@ async def get_stray_cats(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def cat_spend( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), hold_lock: bool = True, ) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: @@ -1953,10 +1953,10 @@ async def cat_spend( wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=CATWallet) - amounts: List[uint64] = [] - puzzle_hashes: List[bytes32] = [] - memos: List[List[bytes]] = [] - additions: Optional[List[Dict[str, Any]]] = request.get("additions") + amounts: list[uint64] = [] + puzzle_hashes: list[bytes32] = [] + memos: list[list[bytes]] = [] + additions: Optional[list[dict[str, Any]]] = request.get("additions") if not isinstance(request["fee"], int) or (additions is None and not isinstance(request["amount"], int)): raise ValueError("An integer amount or fee is required (too many decimals)") if additions is not None: @@ -1976,17 +1976,17 @@ async def cat_spend( puzzle_hashes.append(decode_puzzle_hash(request["inner_address"])) if "memos" in request: memos.append([mem.encode("utf-8") for mem in request["memos"]]) - coins: Optional[Set[Coin]] = None + coins: Optional[set[Coin]] = None if "coins" in request and len(request["coins"]) > 0: coins = {Coin.from_json_dict(coin_json) for coin_json in request["coins"]} fee: uint64 = uint64(request.get("fee", 0)) - cat_discrepancy_params: Tuple[Optional[int], Optional[str], Optional[str]] = ( + cat_discrepancy_params: tuple[Optional[int], Optional[str], Optional[str]] = ( request.get("extra_delta", None), request.get("tail_reveal", None), request.get("tail_solution", None), ) - cat_discrepancy: Optional[Tuple[int, Program, Program]] = None + cat_discrepancy: Optional[tuple[int, Program, Program]] = None if cat_discrepancy_params != (None, None, None): if None in cat_discrepancy_params: raise ValueError("Specifying extra_delta, tail_reveal, or tail_solution requires 
specifying the others") @@ -2029,13 +2029,13 @@ async def cat_spend( "transaction_id": None, # tx_endpoint wrapper will take care of this } - async def cat_get_asset_id(self, request: Dict[str, Any]) -> EndpointResult: + async def cat_get_asset_id(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=CATWallet) asset_id: str = wallet.get_asset_id() return {"asset_id": asset_id, "wallet_id": wallet_id} - async def cat_asset_id_to_name(self, request: Dict[str, Any]) -> EndpointResult: + async def cat_asset_id_to_name(self, request: dict[str, Any]) -> EndpointResult: wallet = await self.service.wallet_state_manager.get_wallet_for_asset_id(request["asset_id"]) if wallet is None: if request["asset_id"] in DEFAULT_CATS: @@ -2048,17 +2048,17 @@ async def cat_asset_id_to_name(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=False) async def create_offer_for_ids( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: if action_scope.config.push: raise ValueError("Cannot push an incomplete spend") # pragma: no cover - offer: Dict[str, int] = request["offer"] + offer: dict[str, int] = request["offer"] fee: uint64 = uint64(request.get("fee", 0)) validate_only: bool = request.get("validate_only", False) - driver_dict_str: Optional[Dict[str, Any]] = request.get("driver_dict", None) + driver_dict_str: Optional[dict[str, Any]] = request.get("driver_dict", None) marshalled_solver = request.get("solver") solver: Optional[Solver] if marshalled_solver is None: @@ -2067,7 +2067,7 @@ async def create_offer_for_ids( solver = Solver(info=marshalled_solver) # This driver_dict construction is to maintain backward compatibility where everything is assumed to be a CAT - driver_dict: Dict[bytes32, PuzzleInfo] = {} + driver_dict: dict[bytes32, PuzzleInfo] = {} if driver_dict_str is None: for key, amount in offer.items(): if amount > 0: @@ -2081,7 +2081,7 @@ async def create_offer_for_ids( for key, value in driver_dict_str.items(): driver_dict[bytes32.from_hexstr(key)] = PuzzleInfo(value) - modified_offer: Dict[Union[int, bytes32], int] = {} + modified_offer: dict[Union[int, bytes32], int] = {} for key in offer: try: modified_offer[bytes32.from_hexstr(key)] = offer[key] @@ -2107,7 +2107,7 @@ async def create_offer_for_ids( } raise ValueError(result[2]) - async def get_offer_summary(self, request: Dict[str, Any]) -> EndpointResult: + async def get_offer_summary(self, request: dict[str, Any]) -> EndpointResult: offer_hex: str = request["offer"] ### @@ -2186,7 +2186,7 @@ async def get_offer_summary(self, request: Dict[str, Any]) -> EndpointResult: }, } - async def check_offer_validity(self, request: Dict[str, Any]) -> EndpointResult: + async def check_offer_validity(self, request: dict[str, Any]) -> EndpointResult: offer_hex: str = request["offer"] ### @@ -2218,9 +2218,9 @@ async def check_offer_validity(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def take_offer( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: offer_hex: str = request["offer"] @@ -2245,7 +2245,7 @@ async def take_offer( offer = Offer.from_bech32(offer_hex) fee: uint64 = uint64(request.get("fee", 0)) - maybe_marshalled_solver: Optional[Dict[str, Any]] = request.get("solver") + maybe_marshalled_solver: Optional[dict[str, Any]] = request.get("solver") solver: Optional[Solver] if maybe_marshalled_solver is None: solver = None @@ -2274,7 +2274,7 @@ async def take_offer( "signing_responses": None, # tx_endpoint wrapper will take care of this } - async def get_offer(self, request: Dict[str, Any]) -> EndpointResult: + async def get_offer(self, request: dict[str, Any]) -> EndpointResult: trade_mgr = self.service.wallet_state_manager.trade_manager trade_id = bytes32.from_hexstr(request["trade_id"]) @@ -2287,7 +2287,7 @@ async def get_offer(self, request: Dict[str, Any]) -> EndpointResult: offer_value: Optional[str] = Offer.from_bytes(offer_to_return).to_bech32() if file_contents else None return {"trade_record": trade_record.to_json_dict_convenience(), "offer": offer_value} - async def get_all_offers(self, request: Dict[str, Any]) -> EndpointResult: + async def get_all_offers(self, request: dict[str, Any]) -> EndpointResult: trade_mgr = self.service.wallet_state_manager.trade_manager start: int = request.get("start", 0) @@ -2309,7 +2309,7 @@ async def get_all_offers(self, request: Dict[str, Any]) -> EndpointResult: include_completed=include_completed, ) result = [] - offer_values: Optional[List[str]] = [] if file_contents else None + offer_values: Optional[list[str]] = [] if file_contents else None for trade in all_trades: result.append(trade.to_json_dict_convenience()) if file_contents and offer_values is not None: @@ -2318,7 +2318,7 @@ async def get_all_offers(self, request: Dict[str, Any]) -> EndpointResult: return {"trade_records": result, "offers": offer_values} - async def get_offers_count(self, request: Dict[str, Any]) -> EndpointResult: + async def get_offers_count(self, request: dict[str, Any]) -> EndpointResult: trade_mgr = self.service.wallet_state_manager.trade_manager (total, my_offers_count, taken_offers_count) = await trade_mgr.trade_store.get_trades_count() @@ -2328,9 +2328,9 @@ async def get_offers_count(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def cancel_offer( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wsm = self.service.wallet_state_manager secure = request["secure"] @@ -2346,9 +2346,9 @@ async def cancel_offer( @tx_endpoint(push=True, merge_spends=False) async def cancel_offers( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: secure = request["secure"] batch_fee: uint64 = uint64(request.get("batch_fee", 0)) @@ -2368,7 +2368,7 @@ async def cancel_offers( if asset_id is not None and asset_id != "xch": key = bytes32.from_hexstr(asset_id) while True: - records: Dict[bytes32, TradeRecord] = {} + records: dict[bytes32, TradeRecord] = {} trades = await trade_mgr.trade_store.get_trades_between( start, end, @@ -2413,13 +2413,13 @@ async def cancel_offers( # Distributed Identities ########################################################################################## - async def did_set_wallet_name(self, request: Dict[str, Any]) -> EndpointResult: + async def did_set_wallet_name(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) await wallet.set_name(str(request["name"])) return {"success": True, "wallet_id": wallet_id} - async def did_get_wallet_name(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_wallet_name(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) name: str = wallet.get_name() # type: ignore[no-untyped-call] # Missing hint in `did_wallet.py` @@ -2428,9 +2428,9 @@ async def did_get_wallet_name(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def did_update_recovery_ids( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) @@ -2458,9 +2458,9 @@ async def did_update_recovery_ids( @tx_endpoint(push=False) async def did_message_spend( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) @@ -2479,7 +2479,7 @@ async def did_message_spend( "transactions": None, # tx_endpoint wrapper will take care of this } - async def did_get_info(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_info(self, request: dict[str, Any]) -> EndpointResult: if "coin_id" not in request: return {"success": False, "error": "Coin ID is required."} coin_id = request["coin_id"] @@ -2519,7 +2519,7 @@ async def did_get_info(self, request: Dict[str, Any]) -> EndpointResult: "hints": hints, } - async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult: + async def did_find_lost_did(self, request: dict[str, Any]) -> EndpointResult: """ Recover a missing or unspendable DID wallet by a coin id of the DID :param coin_id: It can be DID ID, launcher coin ID or any coin ID of the DID you want to find. 
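The recovery flow below keys entirely off a single coin_id, so a short client-side sketch may help; it assumes the wallet RPC client's generic fetch(route, payload) helper and shows only the request and error shapes visible in the hunks that follow:

    from typing import Any

    async def find_lost_did(client: Any, coin_id: str) -> dict[str, Any]:
        # coin_id may be the bech32 DID ID, the launcher coin ID, or any coin ID
        # of the DID; when the launcher cannot be found the handler returns
        # {"success": False, "error": ...}.
        return await client.fetch("did_find_lost_did", {"coin_id": coin_id})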
@@ -2653,7 +2653,7 @@ async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult: if did_wallet is None: # Create DID wallet - response: List[CoinState] = await self.service.get_coin_state([launcher_id], peer=peer) + response: list[CoinState] = await self.service.get_coin_state([launcher_id], peer=peer) if len(response) == 0: return {"success": False, "error": f"Could not find the launch coin with ID: {launcher_id}"} launcher_coin: CoinState = response[0] @@ -2671,7 +2671,7 @@ async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult: # Inner DID puzzle doesn't match, we need to update the DID info full_solution: Program = Program.from_bytes(bytes(coin_spend.solution)) inner_solution: Program = full_solution.rest().rest().first() - recovery_list: List[bytes32] = [] + recovery_list: list[bytes32] = [] backup_required: int = num_verification.as_int() if recovery_list_hash != NIL_TREEHASH: try: @@ -2721,13 +2721,13 @@ async def did_find_lost_did(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def did_update_metadata( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) - metadata: Dict[str, str] = {} + metadata: dict[str, str] = {} if "metadata" in request and type(request["metadata"]) is dict: metadata = request["metadata"] async with self.service.wallet_state_manager.lock: @@ -2746,7 +2746,7 @@ async def did_update_metadata( else: return {"success": False, "error": f"Couldn't update metadata with input: {metadata}"} - async def did_get_did(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_did(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) my_did: str = encode_puzzle_hash(bytes32.fromhex(wallet.get_my_DID()), AddressType.DID.hrp(self.service.config)) @@ -2757,7 +2757,7 @@ async def did_get_did(self, request: Dict[str, Any]) -> EndpointResult: except RuntimeError: return {"success": True, "wallet_id": wallet_id, "my_did": my_did} - async def did_get_recovery_list(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_recovery_list(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) recovery_list = wallet.did_info.backup_ids @@ -2771,7 +2771,7 @@ async def did_get_recovery_list(self, request: Dict[str, Any]) -> EndpointResult "num_required": wallet.did_info.num_of_backup_ids_needed, } - async def did_get_metadata(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_metadata(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) metadata = json.loads(wallet.did_info.metadata) @@ -2783,7 +2783,7 @@ async def did_get_metadata(self, request: Dict[str, Any]) -> EndpointResult: # TODO: this needs a test # Don't need full @tx_endpoint decorator here, but "push" is still a valid option - async def did_recovery_spend(self, request: Dict[str, Any]) -> EndpointResult: # pragma: no cover + async def 
did_recovery_spend(self, request: dict[str, Any]) -> EndpointResult: # pragma: no cover wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) if len(request["attest_data"]) < wallet.did_info.num_of_backup_ids_needed: @@ -2825,7 +2825,7 @@ async def did_recovery_spend(self, request: Dict[str, Any]) -> EndpointResult: "transactions": [tx.to_json_dict_convenience(self.service.config)], } - async def did_get_pubkey(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_pubkey(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) pubkey = bytes((await wallet.wallet_state_manager.get_unused_derivation_record(wallet_id)).pubkey).hex() @@ -2835,9 +2835,9 @@ async def did_get_pubkey(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def did_create_attest( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: # pragma: no cover wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) @@ -2863,7 +2863,7 @@ async def did_create_attest( else: return {"success": False} - async def did_get_information_needed_for_recovery(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_information_needed_for_recovery(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) did_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) my_did = encode_puzzle_hash( @@ -2881,7 +2881,7 @@ async def did_get_information_needed_for_recovery(self, request: Dict[str, Any]) "backup_dids": did_wallet.did_info.backup_ids, } - async def did_get_current_coin_info(self, request: Dict[str, Any]) -> EndpointResult: + async def did_get_current_coin_info(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) did_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) my_did = encode_puzzle_hash( @@ -2900,7 +2900,7 @@ async def did_get_current_coin_info(self, request: Dict[str, Any]) -> EndpointRe "did_amount": did_coin_threeple[2], } - async def did_create_backup_file(self, request: Dict[str, Any]) -> EndpointResult: + async def did_create_backup_file(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) did_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DIDWallet) return {"wallet_id": wallet_id, "success": True, "backup_data": did_wallet.create_backup()} @@ -2908,9 +2908,9 @@ async def did_create_backup_file(self, request: Dict[str, Any]) -> EndpointResul @tx_endpoint(push=True) async def did_transfer_did( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: if await self.service.wallet_state_manager.synced() is False: raise ValueError("Wallet needs to be fully synced.") @@ -2937,7 +2937,7 @@ async def did_transfer_did( # DAO Wallet ########################################################################################## - async def dao_adjust_filter_level(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_adjust_filter_level(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) await dao_wallet.adjust_filter_level(uint64(request["filter_level"])) @@ -2949,9 +2949,9 @@ async def dao_adjust_filter_level(self, request: Dict[str, Any]) -> EndpointResu @tx_endpoint(push=True) async def dao_add_funds_to_treasury( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -2975,7 +2975,7 @@ async def dao_add_funds_to_treasury( "transactions": None, # tx_endpoint wrapper will take care of this } - async def dao_get_treasury_balance(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_get_treasury_balance(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None @@ -2989,14 +2989,14 @@ async def dao_get_treasury_balance(self, request: Dict[str, Any]) -> EndpointRes balances[asset_id.hex()] = balance return {"success": True, "balances": balances} - async def dao_get_treasury_id(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_get_treasury_id(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None treasury_id = dao_wallet.dao_info.treasury_id return {"treasury_id": treasury_id} - async def dao_get_rules(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_get_rules(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None @@ -3006,9 +3006,9 @@ async def dao_get_rules(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def dao_send_to_lockup( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -3030,7 +3030,7 @@ async def dao_send_to_lockup( "transactions": None, # tx_endpoint wrapper will take care of this } - async def dao_get_proposals(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_get_proposals(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) include_closed = request.get("include_closed", True) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -3046,7 +3046,7 @@ async def dao_get_proposals(self, request: Dict[str, Any]) -> EndpointResult: "soft_close_length": dao_rules.soft_close_length, } - async def dao_get_proposal_state(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_get_proposal_state(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None @@ -3056,9 +3056,9 @@ async def dao_get_proposal_state(self, request: Dict[str, Any]) -> EndpointResul @tx_endpoint(push=True) async def dao_exit_lockup( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -3069,7 +3069,7 @@ async def dao_exit_lockup( assert dao_cat_wallet is not None if request["coins"]: # pragma: no cover coin_list = [Coin.from_json_dict(coin) for coin in request["coins"]] - coins: List[LockedCoinInfo] = [] + coins: list[LockedCoinInfo] = [] for lci in dao_cat_wallet.dao_cat_info.locked_coins: if lci.coin in coin_list: coins.append(lci) @@ -3097,19 +3097,19 @@ async def dao_exit_lockup( @tx_endpoint(push=True) async def dao_create_proposal( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None if request["proposal_type"] == "spend": - amounts: List[uint64] = [] - puzzle_hashes: List[bytes32] = [] - asset_types: List[Optional[bytes32]] = [] - additions: Optional[List[Dict[str, Any]]] = request.get("additions") + amounts: list[uint64] = [] + puzzle_hashes: list[bytes32] = [] + asset_types: list[Optional[bytes32]] = [] + additions: Optional[list[dict[str, Any]]] = request.get("additions") if additions is not None: for addition in additions: if "asset_id" in addition: @@ -3195,9 +3195,9 @@ async def dao_create_proposal( @tx_endpoint(push=True) async def dao_vote_on_proposal( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -3221,7 +3221,7 @@ async def dao_vote_on_proposal( "transactions": None, # tx_endpoint wrapper will take care of this } - async def dao_parse_proposal(self, request: Dict[str, Any]) -> EndpointResult: + async def dao_parse_proposal(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) assert dao_wallet is not None @@ -3233,9 +3233,9 @@ async def dao_parse_proposal(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def dao_close_proposal( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) dao_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=DAOWallet) @@ -3264,9 +3264,9 @@ async def dao_close_proposal( @tx_endpoint(push=True) async def dao_free_coins_from_finished_proposals( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) fee = uint64(request.get("fee", 0)) @@ -3291,9 +3291,9 @@ async def dao_free_coins_from_finished_proposals( @tx_endpoint(push=True) async def nft_mint_nft( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: log.debug("Got minting RPC request: %s", request) wallet_id = uint32(request["wallet_id"]) @@ -3364,7 +3364,7 @@ async def nft_mint_nft( "transactions": None, # tx_endpoint wrapper will take care of this } - async def nft_count_nfts(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_count_nfts(self, request: dict[str, Any]) -> EndpointResult: wallet_id = request.get("wallet_id", None) count = 0 if wallet_id is not None: @@ -3378,9 +3378,9 @@ async def nft_count_nfts(self, request: Dict[str, Any]) -> EndpointResult: count = await self.service.wallet_state_manager.nft_store.count() return {"wallet_id": wallet_id, "success": True, "count": count} - async def nft_get_nfts(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_get_nfts(self, request: dict[str, Any]) -> EndpointResult: wallet_id = request.get("wallet_id", None) - nfts: List[NFTCoinInfo] = [] + nfts: list[NFTCoinInfo] = [] if wallet_id is not None: nft_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=NFTWallet) else: @@ -3406,9 +3406,9 @@ async def nft_get_nfts(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def nft_set_nft_did( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) nft_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=NFTWallet) @@ -3439,9 +3439,9 @@ async def nft_set_nft_did( @tx_endpoint(push=True) async def nft_set_did_bulk( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """ Bulk set DID for NFTs across different wallets. @@ -3457,7 +3457,7 @@ async def nft_set_did_bulk( did_id = request.get("did_id", b"") if did_id != b"": did_id = decode_puzzle_hash(did_id) - nft_dict: Dict[uint32, List[NFTCoinInfo]] = {} + nft_dict: dict[uint32, list[NFTCoinInfo]] = {} coin_ids = [] nft_ids = [] fee = uint64(request.get("fee", 0)) @@ -3517,9 +3517,9 @@ async def nft_set_did_bulk( @tx_endpoint(push=True) async def nft_transfer_bulk( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """ Bulk transfer NFTs to an address. @@ -3537,7 +3537,7 @@ async def nft_transfer_bulk( puzzle_hash = decode_puzzle_hash(address) else: return dict(success=False, error="target_address parameter missing") - nft_dict: Dict[uint32, List[NFTCoinInfo]] = {} + nft_dict: dict[uint32, list[NFTCoinInfo]] = {} coin_ids = [] fee = uint64(request.get("fee", 0)) @@ -3588,7 +3588,7 @@ async def nft_transfer_bulk( "transactions": None, # tx_endpoint wrapper will take care of this } - async def nft_get_by_did(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_get_by_did(self, request: dict[str, Any]) -> EndpointResult: did_id: Optional[bytes32] = None if request.get("did_id", None) is not None: did_id = decode_puzzle_hash(request["did_id"]) @@ -3597,7 +3597,7 @@ async def nft_get_by_did(self, request: Dict[str, Any]) -> EndpointResult: return {"wallet_id": wallet.wallet_id, "success": True} return {"error": f"Cannot find a NFT wallet DID = {did_id}", "success": False} - async def nft_get_wallet_did(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_get_wallet_did(self, request: dict[str, Any]) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) nft_wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=NFTWallet) did_bytes: Optional[bytes32] = nft_wallet.get_did() @@ -3606,9 +3606,9 @@ async def nft_get_wallet_did(self, request: Dict[str, Any]) -> EndpointResult: did_id = encode_puzzle_hash(did_bytes, AddressType.DID.hrp(self.service.config)) return {"success": True, "did_id": None if len(did_id) == 0 else did_id} - async def nft_get_wallets_with_dids(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_get_wallets_with_dids(self, request: dict[str, Any]) -> EndpointResult: all_wallets = self.service.wallet_state_manager.wallets.values() - did_wallets_by_did_id: Dict[bytes32, uint32] = {} + did_wallets_by_did_id: dict[bytes32, uint32] = {} for wallet in all_wallets: if wallet.type() == WalletType.DECENTRALIZED_ID: @@ -3616,7 +3616,7 @@ async def nft_get_wallets_with_dids(self, request: Dict[str, Any]) -> EndpointRe if wallet.did_info.origin_coin is not None: did_wallets_by_did_id[wallet.did_info.origin_coin.name()] = wallet.id() - did_nft_wallets: List[Dict[str, Any]] = [] + did_nft_wallets: list[dict[str, Any]] = [] for wallet in all_wallets: if isinstance(wallet, NFTWallet): 
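                # Pair this NFT wallet's bound DID (wallet.get_did() below) with
                # the DID wallet ids collected into did_wallets_by_did_id above,
                # so each reported entry carries both wallet ids.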
nft_wallet_did: Optional[bytes32] = wallet.get_did() @@ -3634,7 +3634,7 @@ async def nft_get_wallets_with_dids(self, request: Dict[str, Any]) -> EndpointRe ) return {"success": True, "nft_wallets": did_nft_wallets} - async def nft_set_nft_status(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_set_nft_status(self, request: dict[str, Any]) -> EndpointResult: wallet_id: uint32 = uint32(request["wallet_id"]) coin_id: bytes32 = bytes32.from_hexstr(request["coin_id"]) status: bool = request["in_transaction"] @@ -3646,9 +3646,9 @@ async def nft_set_nft_status(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def nft_transfer_nft( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) address = request["target_address"] @@ -3689,7 +3689,7 @@ async def nft_transfer_nft( log.exception(f"Failed to transfer NFT: {e}") return {"success": False, "error": str(e)} - async def nft_get_info(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_get_info(self, request: dict[str, Any]) -> EndpointResult: if "coin_id" not in request: return {"success": False, "error": "Coin ID is required."} coin_id = request["coin_id"] @@ -3729,7 +3729,7 @@ async def nft_get_info(self, request: Dict[str, Any]) -> EndpointResult: ) # Get launcher coin - launcher_coin: List[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( + launcher_coin: list[CoinState] = await self.service.wallet_state_manager.wallet_node.get_coin_state( [uncurried_nft.singleton_launcher_id], peer=peer ) if launcher_coin is None or len(launcher_coin) < 1 or launcher_coin[0].spent_height is None: @@ -3758,9 +3758,9 @@ async def nft_get_info(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def nft_add_uri( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: wallet_id = uint32(request["wallet_id"]) # Note metadata updater can only add one uri for one field per spend. @@ -3786,7 +3786,7 @@ async def nft_add_uri( "transactions": None, # tx_endpoint wrapper will take care of this } - async def nft_calculate_royalties(self, request: Dict[str, Any]) -> EndpointResult: + async def nft_calculate_royalties(self, request: dict[str, Any]) -> EndpointResult: return NFTWallet.royalty_calculation( { asset["asset"]: (asset["royalty_address"], uint16(asset["royalty_percentage"])) @@ -3798,9 +3798,9 @@ async def nft_calculate_royalties(self, request: Dict[str, Any]) -> EndpointResu @tx_endpoint(push=False) async def nft_mint_bulk( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: if action_scope.config.push: raise ValueError("Automatic pushing of nft minting transactions not yet available") # pragma: no cover @@ -3927,7 +3927,7 @@ async def nft_mint_bulk( "transactions": None, # tx_endpoint wrapper will take care of this } - async def get_coin_records(self, request: Dict[str, Any]) -> EndpointResult: + async def get_coin_records(self, request: dict[str, Any]) -> EndpointResult: parsed_request = GetCoinRecords.from_json_dict(request) if parsed_request.limit != uint32.MAXIMUM and parsed_request.limit > self.max_get_coin_records_limit: @@ -3969,8 +3969,8 @@ async def get_coin_records(self, request: Dict[str, Any]) -> EndpointResult: "total_count": result.total_count, } - async def get_farmed_amount(self, request: Dict[str, Any]) -> EndpointResult: - tx_records: List[TransactionRecord] = await self.service.wallet_state_manager.tx_store.get_farming_rewards() + async def get_farmed_amount(self, request: dict[str, Any]) -> EndpointResult: + tx_records: list[TransactionRecord] = await self.service.wallet_state_manager.tx_store.get_farming_rewards() amount = 0 pool_reward_amount = 0 farmer_reward_amount = 0 @@ -4016,9 +4016,9 @@ async def get_farmed_amount(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=False) async def create_signed_transaction( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), hold_lock: bool = True, ) -> EndpointResult: if "wallet_id" in request: @@ -4034,7 +4034,7 @@ async def create_signed_transaction( if "additions" not in request or len(request["additions"]) < 1: raise ValueError("Specify additions list") - additions: List[Dict[str, Any]] = request["additions"] + additions: list[dict[str, Any]] = request["additions"] amount_0: uint64 = uint64(additions[0]["amount"]) assert amount_0 <= self.service.constants.MAX_COIN_AMOUNT puzzle_hash_0 = bytes32.from_hexstr(additions[0]["puzzle_hash"]) @@ -4043,7 +4043,7 @@ async def create_signed_transaction( memos_0 = [] if "memos" not in additions[0] else [mem.encode("utf-8") for mem in additions[0]["memos"]] - additional_outputs: List[Payment] = [] + additional_outputs: list[Payment] = [] for addition in additions[1:]: receiver_ph = bytes32.from_hexstr(addition["puzzle_hash"]) if len(receiver_ph) != 32: @@ -4150,9 +4150,9 @@ async def _generate_signed_transaction() -> EndpointResult: @tx_endpoint(push=True) async def pw_join_pool( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: fee = uint64(request.get("fee", 0)) wallet_id = uint32(request["wallet_id"]) @@ -4188,9 +4188,9 @@ async def pw_join_pool( @tx_endpoint(push=True) async def pw_self_pool( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: # Leaving a pool requires two state transitions. # First we transition to PoolSingletonState.LEAVING_POOL @@ -4214,9 +4214,9 @@ async def pw_self_pool( @tx_endpoint(push=True) async def pw_absorb_rewards( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: """Perform a sweep of the p2_singleton rewards controlled by the pool wallet singleton""" if await self.service.wallet_state_manager.synced() is False: @@ -4237,14 +4237,14 @@ async def pw_absorb_rewards( "transactions": None, # tx_endpoint wrapper will take care of this } - async def pw_status(self, request: Dict[str, Any]) -> EndpointResult: + async def pw_status(self, request: dict[str, Any]) -> EndpointResult: """Return the complete state of the Pool wallet with id `request["wallet_id"]`""" wallet_id = uint32(request["wallet_id"]) wallet = self.service.wallet_state_manager.get_wallet(id=wallet_id, required_type=PoolWallet) assert isinstance(wallet, PoolWallet) state: PoolWalletInfo = await wallet.get_current_state() - unconfirmed_transactions: List[TransactionRecord] = await wallet.get_unconfirmed_transactions() + unconfirmed_transactions: list[TransactionRecord] = await wallet.get_unconfirmed_transactions() return { "state": state.to_json_dict(), "unconfirmed_transactions": unconfirmed_transactions, @@ -4256,9 +4256,9 @@ async def pw_status(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def create_new_dl( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """Initialize the DataLayer Wallet (only one can exist)""" if self.service.wallet_state_manager is None: @@ -4287,7 +4287,7 @@ async def create_new_dl( "launcher_id": launcher_id, } - async def dl_track_new(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_track_new(self, request: dict[str, Any]) -> EndpointResult: """Initialize the DataLayer Wallet (only one can exist)""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4312,7 +4312,7 @@ async def dl_track_new(self, request: Dict[str, Any]) -> EndpointResult: continue # try some other peers, maybe someone has it return {} - async def dl_stop_tracking(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_stop_tracking(self, request: dict[str, Any]) -> EndpointResult: """Initialize the DataLayer Wallet (only one can exist)""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4321,7 +4321,7 @@ async def dl_stop_tracking(self, request: Dict[str, Any]) -> EndpointResult: await dl_wallet.stop_tracking_singleton(bytes32.from_hexstr(request["launcher_id"])) return {} - async def dl_latest_singleton(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_latest_singleton(self, request: dict[str, Any]) -> EndpointResult: """Get the singleton record for the latest singleton of a launcher ID""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4333,7 +4333,7 @@ async def dl_latest_singleton(self, request: Dict[str, Any]) -> EndpointResult: record = await wallet.get_latest_singleton(bytes32.from_hexstr(request["launcher_id"]), only_confirmed) return {"singleton": None if record is None else record.to_json_dict()} - async def dl_singletons_by_root(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_singletons_by_root(self, request: dict[str, Any]) -> EndpointResult: """Get the singleton records that contain the specified root""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not 
currently initialized") @@ -4348,9 +4348,9 @@ async def dl_singletons_by_root(self, request: Dict[str, Any]) -> EndpointResult @tx_endpoint(push=True) async def dl_update_root( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """Get the singleton record for the latest singleton of a launcher ID""" if self.service.wallet_state_manager is None: @@ -4374,9 +4374,9 @@ async def dl_update_root( @tx_endpoint(push=True) async def dl_update_multiple( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """Update multiple singletons with new merkle roots""" if self.service.wallet_state_manager is None: @@ -4400,7 +4400,7 @@ async def dl_update_multiple( "transactions": None, # tx_endpoint wrapper will take care of this } - async def dl_history(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_history(self, request: dict[str, Any]) -> EndpointResult: """Get the singleton record for the latest singleton of a launcher ID""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4419,7 +4419,7 @@ async def dl_history(self, request: Dict[str, Any]) -> EndpointResult: history_json = [rec.to_json_dict() for rec in history] return {"history": history_json, "count": len(history_json)} - async def dl_owned_singletons(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_owned_singletons(self, request: dict[str, Any]) -> EndpointResult: """Get all owned singleton records""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4434,7 +4434,7 @@ async def dl_owned_singletons(self, request: Dict[str, Any]) -> EndpointResult: return {"singletons": singletons_json, "count": len(singletons_json)} - async def dl_get_mirrors(self, request: Dict[str, Any]) -> EndpointResult: + async def dl_get_mirrors(self, request: dict[str, Any]) -> EndpointResult: """Get all of the mirrors for a specific singleton""" if self.service.wallet_state_manager is None: raise ValueError("The wallet service is not currently initialized") @@ -4449,9 +4449,9 @@ async def dl_get_mirrors(self, request: Dict[str, Any]) -> EndpointResult: @tx_endpoint(push=True) async def dl_new_mirror( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """Add a new on chain message for a specific singleton""" if self.service.wallet_state_manager is None: @@ -4475,9 +4475,9 @@ async def dl_new_mirror( @tx_endpoint(push=True) async def dl_delete_mirror( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: """Remove an existing mirror for a specific singleton""" if self.service.wallet_state_manager is None: @@ -4500,7 +4500,7 @@ async def dl_delete_mirror( async def dl_verify_proof( self, - request: Dict[str, Any], + request: dict[str, Any], ) -> EndpointResult: """Verify a proof of inclusion for a DL singleton""" res = await dl_verify_proof( @@ -4517,9 +4517,9 @@ async def dl_verify_proof( @tx_endpoint(push=True) async def vc_mint( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """ Mint a verified credential using the assigned DID @@ -4552,7 +4552,7 @@ class VCMint(Streamable): "transactions": None, # tx_endpoint wrapper will take care of this } - async def vc_get(self, request: Dict[str, Any]) -> EndpointResult: + async def vc_get(self, request: dict[str, Any]) -> EndpointResult: """ Given a launcher ID get the verified credential :param request: the 'vc_id' launcher id of a verifiable credential @@ -4569,7 +4569,7 @@ class VCGet(Streamable): vc_record = await self.service.wallet_state_manager.vc_store.get_vc_record(parsed_request.vc_id) return {"vc_record": vc_record} - async def vc_get_list(self, request: Dict[str, Any]) -> EndpointResult: + async def vc_get_list(self, request: dict[str, Any]) -> EndpointResult: """ Get a list of verified credentials :param request: optional parameters for pagination 'start' and 'count' @@ -4602,9 +4602,9 @@ class VCGetList(Streamable): @tx_endpoint(push=True) async def vc_spend( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """ Spend a verified credential @@ -4641,7 +4641,7 @@ class VCSpend(Streamable): "transactions": None, # tx_endpoint wrapper will take care of this } - async def vc_add_proofs(self, request: Dict[str, Any]) -> EndpointResult: + async def vc_add_proofs(self, request: dict[str, Any]) -> EndpointResult: """ Add a set of proofs to the DB that can be used when spending a VC. VCs are near useless until their proofs have been added. @@ -4654,7 +4654,7 @@ async def vc_add_proofs(self, request: Dict[str, Any]) -> EndpointResult: return {} - async def vc_get_proofs_for_root(self, request: Dict[str, Any]) -> EndpointResult: + async def vc_get_proofs_for_root(self, request: dict[str, Any]) -> EndpointResult: """ Given a specified vc root, get any proofs associated with that root. :param request: must specify 'root' representing the tree hash of some set of proofs @@ -4677,9 +4677,9 @@ class VCGetProofsForRoot(Streamable): @tx_endpoint(push=True) async def vc_revoke( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> EndpointResult: """ Revoke an on chain VC provided the correct DID is available @@ -4711,9 +4711,9 @@ class VCRevoke(Streamable): @tx_endpoint(push=True) async def crcat_approve_pending( self, - request: Dict[str, Any], + request: dict[str, Any], action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> EndpointResult: """ Moving any "pending approval" CR-CATs into the spendable balance of the wallet diff --git a/chia/rpc/wallet_rpc_client.py b/chia/rpc/wallet_rpc_client.py index 2cd2022bbd55..980a3bec4078 100644 --- a/chia/rpc/wallet_rpc_client.py +++ b/chia/rpc/wallet_rpc_client.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, cast +from collections.abc import Sequence +from typing import Any, Optional, Union, cast from chia.data_layer.data_layer_util import DLProof, VerifyProofResponse from chia.data_layer.data_layer_wallet import Mirror, SingletonRecord @@ -110,7 +111,7 @@ from chia.wallet.wallet_coin_store import GetCoinRecords -def parse_result_transactions(result: Dict[str, Any]) -> Dict[str, Any]: +def parse_result_transactions(result: dict[str, Any]) -> dict[str, Any]: result["transaction"] = TransactionRecord.from_json_dict(result["transaction"]) result["transactions"] = [TransactionRecord.from_json_dict_convenience(tx) for tx in result["transactions"]] if result["fee_transaction"]: @@ -172,7 +173,7 @@ async def push_transactions( self, request: PushTransactions, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> PushTransactionsResponse: return PushTransactionsResponse.from_json_dict( @@ -193,27 +194,27 @@ async def get_auto_claim(self) -> AutoClaimSettings: return AutoClaimSettings.from_json_dict(await self.fetch("get_auto_claim", {})) # Wallet Management APIs - async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> List[Dict[str, Any]]: + async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> list[dict[str, Any]]: if wallet_type is None: request = {} else: request = {"type": wallet_type} response = await self.fetch("get_wallets", request) # TODO: casting due to lack of type checked deserialization - return cast(List[Dict[str, Any]], response["wallets"]) + return cast(list[dict[str, Any]], response["wallets"]) # Wallet APIs - async def get_wallet_balance(self, wallet_id: int) -> Dict[str, Any]: + async def get_wallet_balance(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("get_wallet_balance", request) # TODO: casting due to lack of type checked deserialization - return cast(Dict[str, Any], response["wallet_balance"]) + return cast(dict[str, Any], response["wallet_balance"]) - async def get_wallet_balances(self, wallet_ids: Optional[List[int]] = None) -> Dict[str, Dict[str, Any]]: + async def get_wallet_balances(self, wallet_ids: Optional[list[int]] = None) -> dict[str, dict[str, Any]]: request = {"wallet_ids": wallet_ids} response = await self.fetch("get_wallet_balances", request) # TODO: casting due to lack of type checked deserialization - return cast(Dict[str, Dict[str, Any]], response["wallet_balances"]) + return cast(dict[str, dict[str, Any]], response["wallet_balances"]) async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: request = {"transaction_id": transaction_id.hex()} @@ -230,8 +231,8 @@ async def get_transactions( to_address: Optional[str] = None, type_filter: Optional[TransactionTypeFilter] = None, confirmed: Optional[bool] = None, - ) -> List[TransactionRecord]: - request: Dict[str, Any] = {"wallet_id": wallet_id} + ) -> list[TransactionRecord]: + request: dict[str, Any] = {"wallet_id": wallet_id} if start is not 
None: request["start"] = start @@ -256,7 +257,7 @@ async def get_transactions( async def get_transaction_count( self, wallet_id: int, confirmed: Optional[bool] = None, type_filter: Optional[TransactionTypeFilter] = None ) -> int: - request: Dict[str, Any] = {"wallet_id": wallet_id} + request: dict[str, Any] = {"wallet_id": wallet_id} if type_filter is not None: request["type_filter"] = type_filter.to_json_dict() if confirmed is not None: @@ -278,9 +279,9 @@ async def send_transaction( address: str, tx_config: TXConfig, fee: uint64 = uint64(0), - memos: Optional[List[str]] = None, - puzzle_decorator_override: Optional[List[Dict[str, Union[str, int, bool]]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + memos: Optional[list[str]] = None, + puzzle_decorator_override: Optional[list[dict[str, Union[str, int, bool]]]] = None, + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> SendTransactionResponse: @@ -303,9 +304,9 @@ async def send_transaction( async def send_transaction_multi( self, wallet_id: int, - additions: List[Dict[str, Any]], + additions: list[dict[str, Any]], tx_config: TXConfig, - coins: Optional[List[Coin]] = None, + coins: Optional[list[Coin]] = None, fee: uint64 = uint64(0), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), @@ -332,13 +333,13 @@ async def send_transaction_multi( async def spend_clawback_coins( self, - coin_ids: List[bytes32], + coin_ids: list[bytes32], fee: int = 0, force: bool = False, push: bool = True, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = { "coin_ids": [cid.hex() for cid in coin_ids], "fee": fee, @@ -363,17 +364,17 @@ async def extend_derivation_index(self, index: int) -> str: updated_index = response["index"] return str(updated_index) - async def get_farmed_amount(self) -> Dict[str, Any]: + async def get_farmed_amount(self) -> dict[str, Any]: return await self.fetch("get_farmed_amount", {}) async def create_signed_transactions( self, - additions: List[Dict[str, Any]], + additions: list[dict[str, Any]], tx_config: TXConfig, - coins: Optional[List[Coin]] = None, + coins: Optional[list[Coin]] = None, fee: uint64 = uint64(0), wallet_id: Optional[int] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, ) -> CreateSignedTransactionsResponse: @@ -403,17 +404,17 @@ async def create_signed_transactions( response = await self.fetch("create_signed_transaction", request) return json_deserialize_with_clvm_streamable(response, CreateSignedTransactionsResponse) - async def select_coins(self, amount: int, wallet_id: int, coin_selection_config: CoinSelectionConfig) -> List[Coin]: + async def select_coins(self, amount: int, wallet_id: int, coin_selection_config: CoinSelectionConfig) -> list[Coin]: request = {"amount": amount, "wallet_id": wallet_id, **coin_selection_config.to_json_dict()} response = await self.fetch("select_coins", request) return [Coin.from_json_dict(coin) for coin in response["coins"]] - async def get_coin_records(self, request: GetCoinRecords) -> Dict[str, Any]: + async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: return await self.fetch("get_coin_records", request.to_json_dict()) async def get_spendable_coins( self, wallet_id: int, coin_selection_config: CoinSelectionConfig - ) -> Tuple[List[CoinRecord], List[CoinRecord], List[Coin]]: + ) -> tuple[list[CoinRecord], list[CoinRecord], list[Coin]]: """ We return a tuple containing: (confirmed records, unconfirmed removals, unconfirmed additions) """ @@ -426,11 +427,11 @@ async def get_spendable_coins( async def get_coin_records_by_names( self, - names: List[bytes32], + names: list[bytes32], include_spent_coins: bool = True, start_height: Optional[int] = None, end_height: Optional[int] = None, - ) -> List[CoinRecord]: + ) -> list[CoinRecord]: names_hex = [name.hex() for name in names] request = {"names": names_hex, "include_spent_coins": include_spent_coins} if start_height is not None: @@ -448,14 +449,14 @@ async def create_new_did_wallet( tx_config: TXConfig, fee: int = 0, name: Optional[str] = "DID Wallet", - backup_ids: List[str] = [], + backup_ids: list[str] = [], required_num: int = 0, type: str = "new", backup_data: str = "", push: bool = True, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = { "wallet_type": "did_wallet", "did_type": type, @@ -473,17 +474,17 @@ async def create_new_did_wallet( response = await self.fetch("create_new_wallet", request) return response - async def get_did_id(self, wallet_id: int) -> Dict[str, Any]: + async def get_did_id(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("did_get_did", request) return response - async def get_did_info(self, coin_id: str, latest: bool) -> Dict[str, Any]: + async def get_did_info(self, coin_id: str, latest: bool) -> dict[str, Any]: request = {"coin_id": coin_id, "latest": latest} response = await self.fetch("did_get_info", request) return response - async def create_did_backup_file(self, wallet_id: int, filename: str) -> Dict[str, Any]: + async def create_did_backup_file(self, wallet_id: int, filename: str) -> dict[str, Any]: request = {"wallet_id": wallet_id, "filename": filename} response = await self.fetch("did_create_backup_file", request) return response @@ -491,10 +492,10 @@ async def create_did_backup_file(self, wallet_id: int, filename: str) -> Dict[st async def update_did_recovery_list( self, wallet_id: int, - recovery_list: List[str], + recovery_list: list[str], num_verification: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> DIDUpdateRecoveryIDsResponse: @@ -510,7 +511,7 @@ async def update_did_recovery_list( response = await self.fetch("did_update_recovery_ids", request) return json_deserialize_with_clvm_streamable(response, DIDUpdateRecoveryIDsResponse) - async def get_did_recovery_list(self, wallet_id: int) -> Dict[str, Any]: + async def get_did_recovery_list(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("did_get_recovery_list", request) return response @@ -519,7 +520,7 @@ async def did_message_spend( self, wallet_id: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, ) -> DIDMessageSpendResponse: @@ -536,9 +537,9 @@ async def did_message_spend( async def update_did_metadata( self, wallet_id: int, - metadata: Dict[str, Any], + metadata: dict[str, Any], tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> DIDUpdateMetadataResponse: @@ -556,7 +557,7 @@ async def update_did_metadata( async def get_did_pubkey(self, request: DIDGetPubkey) -> DIDGetPubkeyResponse: return DIDGetPubkeyResponse.from_json_dict(await self.fetch("did_get_pubkey", request.to_json_dict())) - async def get_did_metadata(self, wallet_id: int) -> Dict[str, Any]: + async def get_did_metadata(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("did_get_metadata", request) return response @@ -565,10 +566,10 @@ async def find_lost_did( self, coin_id: str, recovery_list_hash: Optional[str], - metadata: Optional[Dict[str, Any]], + metadata: Optional[dict[str, Any]], num_verification: Optional[int], - ) -> Dict[str, Any]: - request: Dict[str, Any] = {"coin_id": coin_id} + ) -> dict[str, Any]: + request: dict[str, Any] = {"coin_id": coin_id} if recovery_list_hash is not None: request["recovery_list_hash"] = recovery_list_hash if metadata is not None: @@ -578,7 +579,7 @@ async def find_lost_did( response = await self.fetch("did_find_lost_did", request) return response - async def create_new_did_wallet_from_recovery(self, filename: str) -> Dict[str, Any]: + async def create_new_did_wallet_from_recovery(self, filename: str) -> dict[str, Any]: request = {"wallet_type": "did_wallet", "did_type": "recovery", "filename": filename} response = await self.fetch("create_new_wallet", request) return response @@ -590,9 +591,9 @@ async def did_create_attest( pubkey: str, puzhash: str, file_name: str, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = { "wallet_id": wallet_id, "coin_name": coin_name, @@ -615,7 +616,7 @@ async def did_get_current_coin_info(self, request: DIDGetCurrentCoinInfo) -> DID await self.fetch("did_get_current_coin_info", request.to_json_dict()) ) - async def did_recovery_spend(self, wallet_id: int, attest_filenames: str) -> Dict[str, Any]: + async def did_recovery_spend(self, wallet_id: int, attest_filenames: str) -> dict[str, Any]: request = {"wallet_id": wallet_id, "attest_filenames": attest_filenames} response = await self.fetch("did_recovery_spend", request) return response @@ -627,7 +628,7 @@ async def did_transfer_did( fee: int, with_recovery: bool, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> DIDTransferDIDResponse: @@ -644,12 +645,12 @@ async def did_transfer_did( response = await self.fetch("did_transfer_did", request) return json_deserialize_with_clvm_streamable(response, DIDTransferDIDResponse) - async def did_set_wallet_name(self, wallet_id: int, name: str) -> Dict[str, Any]: + async def did_set_wallet_name(self, wallet_id: int, name: str) -> dict[str, Any]: request = {"wallet_id": wallet_id, "name": name} response = await self.fetch("did_set_wallet_name", request) return response - async def did_get_wallet_name(self, wallet_id: int) -> Dict[str, Any]: + async def did_get_wallet_name(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("did_get_wallet_name", request) return response @@ -666,7 +667,7 @@ async def create_new_pool_wallet( fee: uint64, p2_singleton_delay_time: Optional[uint64] = None, p2_singleton_delayed_ph: Optional[bytes32] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> TransactionRecord: request = { @@ -689,14 +690,14 @@ async def create_new_pool_wallet( res = await self.fetch("create_new_wallet", request) return TransactionRecord.from_json_dict(res["transaction"]) - async def pw_self_pool(self, wallet_id: int, fee: uint64) -> Dict[str, Any]: + async def pw_self_pool(self, wallet_id: int, fee: uint64) -> dict[str, Any]: reply = await self.fetch("pw_self_pool", {"wallet_id": wallet_id, "fee": fee}) reply = parse_result_transactions(reply) return reply async def pw_join_pool( self, wallet_id: int, target_puzzlehash: bytes32, pool_url: str, relative_lock_height: uint32, fee: uint64 - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = { "wallet_id": int(wallet_id), "target_puzzlehash": target_puzzlehash.hex(), @@ -710,7 +711,7 @@ async def pw_join_pool( async def pw_absorb_rewards( self, wallet_id: int, fee: uint64 = uint64(0), max_spends_in_tx: Optional[int] = None - ) -> Dict[str, Any]: + ) -> dict[str, Any]: reply = await self.fetch( "pw_absorb_rewards", {"wallet_id": wallet_id, "fee": fee, "max_spends_in_tx": max_spends_in_tx} ) @@ -718,7 +719,7 @@ async def pw_absorb_rewards( reply = parse_result_transactions(reply) return reply - async def pw_status(self, wallet_id: int) -> Tuple[PoolWalletInfo, List[TransactionRecord]]: + async def pw_status(self, wallet_id: int) -> tuple[PoolWalletInfo, list[TransactionRecord]]: json_dict = await self.fetch("pw_status", {"wallet_id": wallet_id}) return ( PoolWalletInfo.from_json_dict(json_dict["state"]), @@ -728,11 +729,11 @@ async def pw_status(self, wallet_id: int) -> Tuple[PoolWalletInfo, List[Transact # CATS async def create_new_cat_and_wallet( self, amount: uint64, fee: uint64 = uint64(0), test: bool = False - ) -> Dict[str, Any]: + ) -> dict[str, Any]: request = {"wallet_type": "cat_wallet", "mode": "new", "amount": amount, "fee": fee, "test": test} return await self.fetch("create_new_wallet", request) - async def create_wallet_for_existing_cat(self, asset_id: bytes) -> Dict[str, Any]: + async def create_wallet_for_existing_cat(self, asset_id: bytes) -> dict[str, Any]: request = {"wallet_type": "cat_wallet", "asset_id": asset_id.hex(), "mode": "existing"} return await self.fetch("create_new_wallet", request) @@ -740,12 +741,12 @@ async def get_cat_asset_id(self, wallet_id: int) -> bytes32: request = {"wallet_id": wallet_id} return 
bytes32.from_hexstr((await self.fetch("cat_get_asset_id", request))["asset_id"]) - async def get_stray_cats(self) -> List[Dict[str, Any]]: + async def get_stray_cats(self) -> list[dict[str, Any]]: response = await self.fetch("get_stray_cats", {}) # TODO: casting due to lack of type checked deserialization - return cast(List[Dict[str, Any]], response["stray_cats"]) + return cast(list[dict[str, Any]], response["stray_cats"]) - async def cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[Tuple[Optional[uint32], str]]: + async def cat_asset_id_to_name(self, asset_id: bytes32) -> Optional[tuple[Optional[uint32], str]]: request = {"asset_id": asset_id.hex()} try: res = await self.fetch("cat_asset_id_to_name", request) @@ -762,7 +763,7 @@ async def get_cat_name(self, wallet_id: int) -> str: return cast(str, response["name"]) async def set_cat_name(self, wallet_id: int, name: str) -> None: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "name": name, } @@ -775,15 +776,15 @@ async def cat_spend( amount: Optional[uint64] = None, inner_address: Optional[str] = None, fee: uint64 = uint64(0), - memos: Optional[List[str]] = None, - additions: Optional[List[Dict[str, Any]]] = None, - removals: Optional[List[Coin]] = None, - cat_discrepancy: Optional[Tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) - extra_conditions: Tuple[Condition, ...] = tuple(), + memos: Optional[list[str]] = None, + additions: Optional[list[dict[str, Any]]] = None, + removals: Optional[list[Coin]] = None, + cat_discrepancy: Optional[tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> CATSpendResponse: - send_dict: Dict[str, Any] = { + send_dict: dict[str, Any] = { "wallet_id": wallet_id, "fee": fee, "memos": memos if memos is not None else [], @@ -816,16 +817,16 @@ async def cat_spend( # Offers async def create_offer_for_ids( self, - offer_dict: Dict[Union[uint32, str], int], + offer_dict: dict[Union[uint32, str], int], tx_config: TXConfig, - driver_dict: Optional[Dict[str, Any]] = None, - solver: Optional[Dict[str, Any]] = None, + driver_dict: Optional[dict[str, Any]] = None, + solver: Optional[dict[str, Any]] = None, fee: int = 0, validate_only: bool = False, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> CreateOfferForIDsResponse: - send_dict: Dict[str, int] = {str(key): value for key, value in offer_dict.items()} + send_dict: dict[str, int] = {str(key): value for key, value in offer_dict.items()} req = { "offer": send_dict, @@ -844,11 +845,11 @@ async def create_offer_for_ids( async def get_offer_summary( self, offer: Offer, advanced: bool = False - ) -> Tuple[bytes32, Dict[str, Dict[str, int]]]: + ) -> tuple[bytes32, dict[str, dict[str, int]]]: res = await self.fetch("get_offer_summary", {"offer": offer.to_bech32(), "advanced": advanced}) return bytes32.from_hexstr(res["id"]), res["summary"] - async def check_offer_validity(self, offer: Offer) -> Tuple[bytes32, bool]: + async def check_offer_validity(self, offer: Offer) -> tuple[bytes32, bool]: res = await self.fetch("check_offer_validity", {"offer": offer.to_bech32()}) return bytes32.from_hexstr(res["id"]), res["valid"] @@ -856,9 +857,9 @@ async def take_offer( self, offer: Offer, tx_config: TXConfig, - solver: Optional[Dict[str, Any]] = None, + solver: Optional[dict[str, Any]] = None, fee: int = 0, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> TakeOfferResponse: @@ -890,7 +891,7 @@ async def get_all_offers( exclude_my_offers: bool = False, exclude_taken_offers: bool = False, include_completed: bool = False, - ) -> List[TradeRecord]: + ) -> list[TradeRecord]: res = await self.fetch( "get_all_offers", { @@ -924,7 +925,7 @@ async def cancel_offer( tx_config: TXConfig, fee: int = 0, secure: bool = True, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> CancelOfferResponse: @@ -951,7 +952,7 @@ async def cancel_offers( batch_size: int = 5, cancel_all: bool = False, asset_id: Optional[bytes32] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> CancelOffersResponse: @@ -977,7 +978,7 @@ async def get_cat_list(self) -> GetCATListResponse: return GetCATListResponse.from_json_dict(await self.fetch("get_cat_list", {})) # NFT wallet - async def create_new_nft_wallet(self, did_id: Optional[str], name: Optional[str] = None) -> Dict[str, Any]: + async def create_new_nft_wallet(self, did_id: Optional[str], name: Optional[str] = None) -> dict[str, Any]: request = {"wallet_type": "nft_wallet", "did_id": did_id, "name": name} response = await self.fetch("create_new_wallet", request) return response @@ -988,18 +989,18 @@ async def mint_nft( royalty_address: Optional[str], target_address: Optional[str], hash: str, - uris: List[str], + uris: list[str], tx_config: TXConfig, meta_hash: Optional[str] = "", - meta_uris: List[str] = [], + meta_uris: list[str] = [], license_hash: Optional[str] = "", - license_uris: List[str] = [], + license_uris: list[str] = [], edition_total: Optional[int] = 1, edition_number: Optional[int] = 1, fee: int = 0, royalty_percentage: int = 0, did_id: Optional[str] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> NFTMintNFTResponse: @@ -1034,7 +1035,7 @@ async def add_uri_to_nft( uri: str, fee: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> NFTAddURIResponse: @@ -1054,9 +1055,9 @@ async def add_uri_to_nft( async def nft_calculate_royalties( self, - royalty_assets_dict: Dict[Any, Tuple[Any, uint16]], - fungible_asset_dict: Dict[Any, uint64], - ) -> Dict[str, List[Dict[str, Any]]]: + royalty_assets_dict: dict[Any, tuple[Any, uint16]], + fungible_asset_dict: dict[Any, uint64], + ) -> dict[str, list[dict[str, Any]]]: request = { "royalty_assets": [ {"asset": id, "royalty_address": royalty_info[0], "royalty_percentage": royalty_info[1]} @@ -1068,7 +1069,7 @@ async def nft_calculate_royalties( del response["success"] return response - async def get_nft_info(self, coin_id: str, latest: bool = True) -> Dict[str, Any]: + async def get_nft_info(self, coin_id: str, latest: bool = True) -> dict[str, Any]: request = {"coin_id": coin_id, "latest": latest} response = await self.fetch("nft_get_info", request) return response @@ -1080,7 +1081,7 @@ async def transfer_nft( target_address: str, fee: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> NFTTransferNFTResponse: @@ -1097,12 +1098,12 @@ async def transfer_nft( response = await self.fetch("nft_transfer_nft", request) return json_deserialize_with_clvm_streamable(response, NFTTransferNFTResponse) - async def count_nfts(self, wallet_id: Optional[int]) -> Dict[str, Any]: + async def count_nfts(self, wallet_id: Optional[int]) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("nft_count_nfts", request) return response - async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) -> Dict[str, Any]: + async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) -> dict[str, Any]: request = {"wallet_id": wallet_id, "num": num, "start_index": start_index} response = await self.fetch("nft_get_nfts", request) return response @@ -1117,7 +1118,7 @@ async def set_nft_did( nft_coin_id: str, fee: int, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> NFTSetNFTDIDResponse: @@ -1138,7 +1139,7 @@ async def set_nft_did( async def set_nft_status(self, request: NFTSetNFTStatus) -> None: await self.fetch("nft_set_nft_status", request.to_json_dict()) - async def get_nft_wallet_did(self, wallet_id: int) -> Dict[str, Any]: + async def get_nft_wallet_did(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("nft_get_wallet_did", request) return response @@ -1149,21 +1150,21 @@ async def get_nft_wallets_with_dids(self) -> NFTGetWalletsWithDIDsResponse: async def nft_mint_bulk( self, wallet_id: int, - metadata_list: List[Dict[str, Any]], + metadata_list: list[dict[str, Any]], royalty_percentage: Optional[int], royalty_address: Optional[str], tx_config: TXConfig, - target_list: Optional[List[str]] = None, + target_list: Optional[list[str]] = None, mint_number_start: Optional[int] = 1, mint_total: Optional[int] = None, - xch_coins: Optional[List[Dict[str, Any]]] = None, + xch_coins: Optional[list[dict[str, Any]]] = None, xch_change_target: Optional[str] = None, new_innerpuzhash: Optional[str] = None, - did_coin: Optional[Dict[str, Any]] = None, + did_coin: Optional[dict[str, Any]] = None, did_lineage_parent: Optional[str] = None, mint_from_did: Optional[bool] = False, fee: Optional[int] = 0, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = False, ) -> NFTMintBulkResponse: @@ -1194,7 +1195,7 @@ async def set_nft_did_bulk( self, request: NFTSetDIDBulk, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> NFTSetDIDBulkResponse: return NFTSetDIDBulkResponse.from_json_dict( @@ -1207,7 +1208,7 @@ async def transfer_nft_bulk( self, request: NFTTransferBulk, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> NFTTransferBulkResponse: return NFTTransferBulkResponse.from_json_dict( @@ -1221,9 +1222,9 @@ async def create_new_dl( self, root: bytes32, fee: uint64, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> Tuple[List[TransactionRecord], bytes32]: + ) -> tuple[list[TransactionRecord], bytes32]: request = { "root": root.hex(), "fee": fee, @@ -1250,7 +1251,7 @@ async def dl_latest_singleton( response = await self.fetch("dl_latest_singleton", request) return None if response["singleton"] is None else SingletonRecord.from_json_dict(response["singleton"]) - async def dl_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> List[SingletonRecord]: + async def dl_singletons_by_root(self, launcher_id: bytes32, root: bytes32) -> list[SingletonRecord]: request = {"launcher_id": launcher_id.hex(), "root": root.hex()} response = await self.fetch("dl_singletons_by_root", request) return [SingletonRecord.from_json_dict(single) for single in response["singletons"]] @@ -1260,7 +1261,7 @@ async def dl_update_root( launcher_id: bytes32, new_root: bytes32, fee: uint64, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> TransactionRecord: request = { @@ -1275,11 +1276,11 @@ async def dl_update_root( async def dl_update_multiple( self, - update_dictionary: Dict[bytes32, bytes32], + update_dictionary: dict[bytes32, bytes32], fee: uint64, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: updates_as_strings = {str(lid): str(root) for lid, root in update_dictionary.items()} request = { "updates": updates_as_strings, @@ -1296,7 +1297,7 @@ async def dl_history( min_generation: Optional[uint32] = None, max_generation: Optional[uint32] = None, num_results: Optional[uint32] = None, - ) -> List[SingletonRecord]: + ) -> list[SingletonRecord]: request = {"launcher_id": launcher_id.hex()} if min_generation is not None: @@ -1309,11 +1310,11 @@ async def dl_history( response = await self.fetch("dl_history", request) return [SingletonRecord.from_json_dict(single) for single in response["history"]] - async def dl_owned_singletons(self) -> List[SingletonRecord]: + async def dl_owned_singletons(self) -> list[SingletonRecord]: response = await self.fetch(path="dl_owned_singletons", request_json={}) return [SingletonRecord.from_json_dict(singleton) for singleton in response["singletons"]] - async def dl_get_mirrors(self, launcher_id: bytes32) -> List[Mirror]: + async def dl_get_mirrors(self, launcher_id: bytes32) -> list[Mirror]: response = await self.fetch(path="dl_get_mirrors", request_json={"launcher_id": launcher_id.hex()}) return [Mirror.from_json_dict(mirror) for mirror in response["mirrors"]] @@ -1321,11 +1322,11 @@ async def dl_new_mirror( self, launcher_id: bytes32, amount: uint64, - urls: List[bytes], + urls: list[bytes], fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: response = await self.fetch( path="dl_new_mirror", request_json={ @@ -1343,9 +1344,9 @@ async def dl_delete_mirror( self, coin_id: bytes32, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: response = await self.fetch( path="dl_delete_mirror", request_json={ @@ -1380,7 +1381,7 @@ async def send_notification( msg: bytes, amount: uint64, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> TransactionRecord: @@ -1398,13 +1399,13 @@ async def send_notification( ) return TransactionRecord.from_json_dict_convenience(response["tx"]) - async def sign_message_by_address(self, address: str, message: str) -> Tuple[str, str, str]: + async def sign_message_by_address(self, address: str, message: str) -> tuple[str, str, str]: response = await self.fetch("sign_message_by_address", {"address": address, "message": message}) return response["pubkey"], response["signature"], response["signing_mode"] async def sign_message_by_id( self, id: str, message: str, is_hex: bool = False, safe_mode: bool = True - ) -> Tuple[str, str, str]: + ) -> tuple[str, str, str]: response = await self.fetch( "sign_message_by_id", {"id": id, "message": message, "is_hex": is_hex, "safe_mode": safe_mode} ) @@ -1423,18 +1424,18 @@ async def create_new_dao_wallet( self, mode: str, tx_config: TXConfig, - dao_rules: Optional[Dict[str, uint64]] = None, + dao_rules: Optional[dict[str, uint64]] = None, amount_of_cats: Optional[uint64] = None, treasury_id: Optional[bytes32] = None, filter_amount: uint64 = uint64(1), name: Optional[str] = None, fee: uint64 = uint64(0), fee_for_cat: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> CreateNewDAOWalletResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_type": "dao_wallet", "mode": mode, "treasury_id": treasury_id.hex() if treasury_id is not None else treasury_id, @@ -1452,12 +1453,12 @@ async def create_new_dao_wallet( response = await self.fetch("create_new_wallet", request) return json_deserialize_with_clvm_streamable(response, CreateNewDAOWalletResponse) - async def dao_get_treasury_id(self, wallet_id: int) -> Dict[str, Any]: + async def dao_get_treasury_id(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("dao_get_treasury_id", request) return response - async def dao_get_rules(self, wallet_id: int) -> Dict[str, Any]: + async def dao_get_rules(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("dao_get_rules", request) return response @@ -1467,19 +1468,19 @@ async def dao_create_proposal( wallet_id: int, proposal_type: str, tx_config: TXConfig, - additions: Optional[List[Dict[str, Any]]] = None, + additions: Optional[list[dict[str, Any]]] = None, amount: Optional[uint64] = None, inner_address: Optional[str] = None, asset_id: Optional[str] = None, cat_target_address: Optional[str] = None, vote_amount: Optional[int] = None, - new_dao_rules: Optional[Dict[str, Optional[uint64]]] = None, + new_dao_rules: Optional[dict[str, Optional[uint64]]] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOCreateProposalResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "proposal_type": proposal_type, "additions": additions, @@ -1498,12 +1499,12 @@ async def dao_create_proposal( response = await self.fetch("dao_create_proposal", request) return json_deserialize_with_clvm_streamable(response, DAOCreateProposalResponse) - async def dao_get_proposal_state(self, wallet_id: int, proposal_id: str) -> Dict[str, Any]: + async def dao_get_proposal_state(self, wallet_id: int, proposal_id: str) -> dict[str, Any]: request = {"wallet_id": wallet_id, "proposal_id": proposal_id} response = await self.fetch("dao_get_proposal_state", request) return response - async def dao_parse_proposal(self, wallet_id: int, proposal_id: str) -> Dict[str, Any]: + async def dao_parse_proposal(self, wallet_id: int, proposal_id: str) -> dict[str, Any]: request = {"wallet_id": wallet_id, "proposal_id": proposal_id} response = await self.fetch("dao_parse_proposal", request) return response @@ -1516,11 +1517,11 @@ async def dao_vote_on_proposal( tx_config: TXConfig, is_yes_vote: bool = True, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOVoteOnProposalResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "proposal_id": proposal_id, "vote_amount": vote_amount, @@ -1534,7 +1535,7 @@ async def dao_vote_on_proposal( response = await self.fetch("dao_vote_on_proposal", request) return json_deserialize_with_clvm_streamable(response, DAOVoteOnProposalResponse) - async def dao_get_proposals(self, wallet_id: int, include_closed: bool = True) -> Dict[str, Any]: + async def dao_get_proposals(self, wallet_id: int, include_closed: bool = True) -> dict[str, Any]: request = {"wallet_id": wallet_id, "include_closed": include_closed} response = await self.fetch("dao_get_proposals", request) return response @@ -1546,11 +1547,11 @@ async def dao_close_proposal( tx_config: TXConfig, self_destruct: Optional[bool] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOCloseProposalResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "proposal_id": proposal_id, "self_destruct": self_destruct, @@ -1568,11 +1569,11 @@ async def dao_free_coins_from_finished_proposals( wallet_id: int, tx_config: TXConfig, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOFreeCoinsFromFinishedProposalsResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "fee": fee, "extra_conditions": list(extra_conditions), @@ -1582,7 +1583,7 @@ async def dao_free_coins_from_finished_proposals( response = await self.fetch("dao_free_coins_from_finished_proposals", request) return json_deserialize_with_clvm_streamable(response, DAOFreeCoinsFromFinishedProposalsResponse) - async def dao_get_treasury_balance(self, wallet_id: int) -> Dict[str, Any]: + async def dao_get_treasury_balance(self, wallet_id: int) -> dict[str, Any]: request = {"wallet_id": wallet_id} response = await self.fetch("dao_get_treasury_balance", request) return response @@ -1594,11 +1595,11 @@ async def dao_add_funds_to_treasury( amount: uint64, tx_config: TXConfig, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOAddFundsToTreasuryResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "funding_wallet_id": funding_wallet_id, "amount": amount, @@ -1617,11 +1618,11 @@ async def dao_send_to_lockup( amount: uint64, tx_config: TXConfig, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOSendToLockupResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "amount": amount, "fee": fee, @@ -1637,13 +1638,13 @@ async def dao_exit_lockup( self, wallet_id: int, tx_config: TXConfig, - coins: Optional[List[Dict[str, Any]]] = None, + coins: Optional[list[dict[str, Any]]] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOExitLockupResponse: - request: Dict[str, Any] = { + request: dict[str, Any] = { "wallet_id": wallet_id, "coins": coins, "fee": fee, @@ -1655,7 +1656,7 @@ async def dao_exit_lockup( response = await self.fetch("dao_exit_lockup", request) return json_deserialize_with_clvm_streamable(response, DAOExitLockupResponse) - async def dao_adjust_filter_level(self, wallet_id: int, filter_level: int) -> Dict[str, Any]: + async def dao_adjust_filter_level(self, wallet_id: int, filter_level: int) -> dict[str, Any]: request = {"wallet_id": wallet_id, "filter_level": filter_level} response = await self.fetch("dao_adjust_filter_level", request) return response @@ -1666,7 +1667,7 @@ async def vc_mint( tx_config: TXConfig, target_address: Optional[bytes32] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> VCMintResponse: @@ -1688,7 +1689,7 @@ async def vc_get(self, vc_id: bytes32) -> Optional[VCRecord]: response = await self.fetch("vc_get", {"vc_id": vc_id.hex()}) return None if response["vc_record"] is None else VCRecord.from_json_dict(response["vc_record"]) - async def vc_get_list(self, start: int = 0, count: int = 50) -> Tuple[List[VCRecord], Dict[str, Any]]: + async def vc_get_list(self, start: int = 0, count: int = 50) -> tuple[list[VCRecord], dict[str, Any]]: response = await self.fetch("vc_get_list", {"start": start, "count": count}) return [VCRecord.from_json_dict(rec) for rec in response["vc_records"]], response["proofs"] @@ -1700,7 +1701,7 @@ async def vc_spend( new_proof_hash: Optional[bytes32] = None, provider_inner_puzhash: Optional[bytes32] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> VCSpendResponse: @@ -1722,19 +1723,19 @@ async def vc_spend( ) return json_deserialize_with_clvm_streamable(response, VCSpendResponse) - async def vc_add_proofs(self, proofs: Dict[str, Any]) -> None: + async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: await self.fetch("vc_add_proofs", {"proofs": proofs}) - async def vc_get_proofs_for_root(self, root: bytes32) -> Dict[str, Any]: + async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: response = await self.fetch("vc_get_proofs_for_root", {"root": root.hex()}) - return cast(Dict[str, Any], response["proofs"]) + return cast(dict[str, Any], response["proofs"]) async def vc_revoke( self, vc_parent_id: bytes32, tx_config: TXConfig, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), push: bool = True, ) -> VCRevokeResponse: @@ -1759,7 +1760,7 @@ async def crcat_approve_pending( fee: uint64 = uint64(0), push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: response = await self.fetch( "crcat_approve_pending", { @@ -1821,7 +1822,7 @@ async def split_coins( self, args: SplitCoins, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> SplitCoinsResponse: return SplitCoinsResponse.from_json_dict( @@ -1834,7 +1835,7 @@ async def combine_coins( self, args: CombineCoins, tx_config: TXConfig, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> CombineCoinsResponse: return CombineCoinsResponse.from_json_dict( diff --git a/chia/seeder/crawl_store.py b/chia/seeder/crawl_store.py index e740e8c83e3a..b8edeaa410f4 100644 --- a/chia/seeder/crawl_store.py +++ b/chia/seeder/crawl_store.py @@ -6,7 +6,6 @@ import time from dataclasses import dataclass, field, replace from datetime import datetime, timedelta -from typing import Dict, List import aiosqlite @@ -19,9 +18,9 @@ @dataclass class CrawlStore: crawl_db: aiosqlite.Connection - host_to_records: Dict[str, PeerRecord] = field(default_factory=dict) # peer_id: PeerRecord - host_to_selected_time: Dict[str, float] = field(default_factory=dict) # peer_id: timestamp (as a float) - host_to_reliability: Dict[str, PeerReliability] = field(default_factory=dict) # peer_id: PeerReliability + host_to_records: dict[str, PeerRecord] = field(default_factory=dict) # peer_id: PeerRecord + host_to_selected_time: dict[str, float] = field(default_factory=dict) # peer_id: timestamp (as a float) + host_to_reliability: dict[str, PeerReliability] = field(default_factory=dict) # peer_id: PeerReliability banned_peers: int = 0 ignored_peers: int = 0 reliable_peers: int = 0 @@ -184,17 +183,16 @@ async def peer_connected_hostname(self, host: str, connected: bool = True, tls_v else: await self.peer_failed_to_connect(record) - async def get_peers_to_crawl(self, min_batch_size: int, max_batch_size: int) -> List[PeerRecord]: + async def get_peers_to_crawl(self, min_batch_size: int, max_batch_size: int) -> list[PeerRecord]: now = int(time.time()) records = [] records_v6 = [] counter = 0 self.ignored_peers = 0 self.banned_peers = 0 - for peer_id in self.host_to_reliability: + for peer_id, reliability in self.host_to_reliability.items(): add = False counter += 1 - reliability = self.host_to_reliability[peer_id] if reliability.ignore_till < now and reliability.ban_till < now: add = True else: @@ -410,7 +408,7 @@ async def prune_old_peers(self, older_than_days: int) -> None: await self.crawl_db.commit() await self.crawl_db.execute("VACUUM") - to_delete: List[str] = [] + to_delete: list[str] = [] # Deletes the old records from the in memory Dicts for peer_id, peer_record in self.host_to_records.items(): diff --git a/chia/seeder/crawler.py b/chia/seeder/crawler.py index 318ee309b86e..fafe10b46587 100644 --- a/chia/seeder/crawler.py +++ b/chia/seeder/crawler.py @@ -7,22 +7,10 @@ import time import traceback from collections import defaultdict +from collections.abc import AsyncIterator, Awaitable from dataclasses import dataclass, field from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - AsyncIterator, - Awaitable, - Callable, - ClassVar, - Dict, - List, - Optional, - Set, - Tuple, - cast, -) +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, cast import aiosqlite @@ -52,7 +40,7 @@ class Crawler: _protocol_check: ClassVar[RpcServiceProtocol] = cast("Crawler", None) - config: Dict[str, Any] + config: dict[str, Any] root_path: Path constants: ConsensusConstants print_status: bool = True @@ -63,15 +51,15 @@ class Crawler: log: logging.Logger = log _shut_down: bool = False peer_count: int = 0 - with_peak: Set[PeerInfo] = field(default_factory=set) - seen_nodes: Set[str] = field(default_factory=set) + with_peak: set[PeerInfo] = field(default_factory=set) + seen_nodes: set[str] = field(default_factory=set) minimum_version_count: int = 0 - peers_retrieved: List[RespondPeers] = field(default_factory=list) - 
host_to_version: Dict[str, str] = field(default_factory=dict) - versions: Dict[str, int] = field(default_factory=lambda: defaultdict(int)) - version_cache: List[Tuple[str, str]] = field(default_factory=list) - handshake_time: Dict[str, uint64] = field(default_factory=dict) - best_timestamp_per_peer: Dict[str, uint64] = field(default_factory=dict) + peers_retrieved: list[RespondPeers] = field(default_factory=list) + host_to_version: dict[str, str] = field(default_factory=dict) + versions: dict[str, int] = field(default_factory=lambda: defaultdict(int)) + version_cache: list[tuple[str, str]] = field(default_factory=list) + handshake_time: dict[str, uint64] = field(default_factory=dict) + best_timestamp_per_peer: dict[str, uint64] = field(default_factory=dict) start_crawler_loop: bool = True @property @@ -129,7 +117,7 @@ def __post_init__(self) -> None: def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None: self.state_changed_callback = callback - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) async def create_client( @@ -300,7 +288,7 @@ async def crawl(self) -> None: self.versions[version] += 1 # clear caches - self.version_cache: List[Tuple[str, str]] = [] + self.version_cache: list[tuple[str, str]] = [] self.peers_retrieved = [] self.server.banned_peers = {} self.with_peak = set() @@ -335,7 +323,7 @@ async def save_to_db(self) -> None: def set_server(self, server: ChiaServer) -> None: self._server = server - def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None: + def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None: if self.state_changed_callback is not None: self.state_changed_callback(change, change_data) @@ -362,7 +350,7 @@ async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnec async def on_connect(self, connection: WSChiaConnection) -> None: pass - async def print_summary(self, t_start: float, total_nodes: int, tried_nodes: Set[str]) -> None: + async def print_summary(self, t_start: float, total_nodes: int, tried_nodes: set[str]) -> None: assert self.crawl_store is not None # this is only ever called from the crawl task if not self.print_status: return diff --git a/chia/seeder/dns_server.py b/chia/seeder/dns_server.py index 7ef245466ba8..d0efe7de6ad7 100644 --- a/chia/seeder/dns_server.py +++ b/chia/seeder/dns_server.py @@ -5,13 +5,14 @@ import signal import sys import traceback +from collections.abc import AsyncIterator, Awaitable from contextlib import asynccontextmanager from dataclasses import dataclass, field from ipaddress import IPv4Address, IPv6Address, ip_address from multiprocessing import freeze_support from pathlib import Path from types import FrameType -from typing import Any, AsyncIterator, Awaitable, Callable, Dict, List, Optional +from typing import Any, Callable, Optional import aiosqlite from dnslib import AAAA, EDNS0, NS, QTYPE, RCODE, RD, RR, SOA, A, DNSError, DNSHeader, DNSQuestion, DNSRecord @@ -38,8 +39,8 @@ def __getattr__(self, item: str) -> DomainName: @dataclass(frozen=True) class PeerList: - ipv4: List[IPv4Address] - ipv6: List[IPv6Address] + ipv4: list[IPv4Address] + ipv6: list[IPv6Address] @property def no_peers(self) -> bool: @@ -121,7 +122,7 @@ class TCPDNSServerProtocol(asyncio.BufferedProtocol): peer_info: 
str = field(init=False, default="") expected_length: int = 0 buffer: bytearray = field(init=False, default_factory=lambda: bytearray(2)) - futures: List[asyncio.Future[None]] = field(init=False, default_factory=list) + futures: list[asyncio.Future[None]] = field(init=False, default_factory=list) def connection_made(self, transport: asyncio.BaseTransport) -> None: """ @@ -263,7 +264,7 @@ async def get_dns_reply(callback: DnsCallback, dns_request: DNSRecord) -> DNSRec @dataclass class DNSServer: - config: Dict[str, Any] + config: dict[str, Any] root_path: Path lock: asyncio.Lock = field(default_factory=asyncio.Lock) shutdown_event: asyncio.Event = field(default_factory=asyncio.Event) @@ -282,11 +283,11 @@ class DNSServer: db_path: Path = field(init=False) domain: DomainName = field(init=False) ns1: DomainName = field(init=False) - ns_records: List[RR] = field(init=False) + ns_records: list[RR] = field(init=False) ttl: int = field(init=False) soa_record: RR = field(init=False) - reliable_peers_v4: List[IPv4Address] = field(default_factory=list) - reliable_peers_v6: List[IPv6Address] = field(default_factory=list) + reliable_peers_v4: list[IPv4Address] = field(default_factory=list) + reliable_peers_v6: list[IPv6Address] = field(default_factory=list) pointer_v4: int = 0 pointer_v6: int = 0 @@ -307,7 +308,7 @@ def __post_init__(self) -> None: if not self.domain.endswith("."): self.domain = DomainName(self.domain + ".") # Make sure the domain ends with a period, as per RFC 1035. self.ns1: DomainName = DomainName(self.config["nameserver"]) - self.ns_records: List[NS] = [NS(self.ns1)] + self.ns_records: list[NS] = [NS(self.ns1)] self.ttl: int = self.config["ttl"] self.soa_record: SOA = SOA( mname=self.ns1, # primary name server @@ -427,7 +428,7 @@ async def periodically_get_reliable_peers(self) -> None: async def get_peers_to_respond(self, ipv4_count: int, ipv6_count: int) -> PeerList: async with self.lock: # Append IPv4. - ipv4_peers: List[IPv4Address] = [] + ipv4_peers: list[IPv4Address] = [] size = len(self.reliable_peers_v4) if ipv4_count > 0 and size <= ipv4_count: ipv4_peers = self.reliable_peers_v4 @@ -437,7 +438,7 @@ async def get_peers_to_respond(self, ipv4_count: int, ipv6_count: int) -> PeerLi ] self.pointer_v4 = (self.pointer_v4 + ipv4_count) % size # mark where we left off # Append IPv6. 
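The get_peers_to_respond hunk above keeps a wrap-around pointer per address family so that successive DNS responses rotate through the reliable-peer list instead of always serving the same prefix; the IPv6 branch just below repeats the same arithmetic. A minimal standalone sketch of that rotation, with illustrative names that are not part of the patch:

    from typing import TypeVar

    T = TypeVar("T")

    def next_window(items: list[T], pointer: int, count: int) -> tuple[list[T], int]:
        # Same scheme as the seeder: take `count` entries starting at `pointer`,
        # wrapping modulo the list size, then advance the pointer so the next
        # query sees a different slice. With fewer items than requested, return
        # everything and leave the pointer untouched.
        size = len(items)
        if count <= 0 or size == 0:
            return [], pointer
        if size <= count:
            return items, pointer
        window = [items[i % size] for i in range(pointer, pointer + count)]
        return window, (pointer + count) % size

    # Rotating three peers through responses of two:
    # next_window(["a", "b", "c"], 0, 2) -> (["a", "b"], 2)
    # next_window(["a", "b", "c"], 2, 2) -> (["c", "a"], 1)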
- ipv6_peers: List[IPv6Address] = [] + ipv6_peers: list[IPv6Address] = [] size = len(self.reliable_peers_v6) if ipv6_count > 0 and size <= ipv6_count: ipv6_peers = self.reliable_peers_v6 @@ -473,7 +474,7 @@ async def dns_response(self, request: DNSRecord) -> DNSRecord: ttl: int = self.ttl # we add these to the list as it will allow us to respond to ns and soa requests - ips: List[RD] = [self.soa_record] + self.ns_records + ips: list[RD] = [self.soa_record] + self.ns_records ipv4_count = 0 ipv6_count = 0 if question_type is QTYPE.A: @@ -493,7 +494,7 @@ async def dns_response(self, request: DNSRecord) -> DNSRecord: ips.extend([A(str(peer)) for peer in peers.ipv4]) ips.extend([AAAA(str(peer)) for peer in peers.ipv6]) - records: Dict[DomainName, List[RD]] = { # this is where we can add other records we want to serve + records: dict[DomainName, list[RD]] = { # this is where we can add other records we want to serve self.domain: ips, } @@ -521,7 +522,7 @@ async def run_dns_server(dns_server: DNSServer) -> None: # pragma: no cover await dns_server.shutdown_event.wait() # this is released on SIGINT or SIGTERM or any unhandled exception -def create_dns_server_service(config: Dict[str, Any], root_path: Path) -> DNSServer: +def create_dns_server_service(config: dict[str, Any], root_path: Path) -> DNSServer: service_config = config[SERVICE_NAME] return DNSServer(service_config, root_path) diff --git a/chia/seeder/start_crawler.py b/chia/seeder/start_crawler.py index 6462e89b6915..ae14c3b6a44f 100644 --- a/chia/seeder/start_crawler.py +++ b/chia/seeder/start_crawler.py @@ -4,7 +4,7 @@ import pathlib import sys from multiprocessing import freeze_support -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -28,7 +28,7 @@ def create_full_node_crawler_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], consensus_constants: ConsensusConstants, connect_to_daemon: bool = True, start_crawler_loop: bool = True, diff --git a/chia/server/address_manager.py b/chia/server/address_manager.py index 609c8dfb8e61..4cb1c0c8a516 100644 --- a/chia/server/address_manager.py +++ b/chia/server/address_manager.py @@ -6,7 +6,7 @@ from asyncio import Lock from random import choice, randrange from secrets import randbits -from typing import Dict, List, Optional, Set, Tuple +from typing import Optional from chia.types.peer_info import PeerInfo, TimestampedPeerInfo from chia.util.hash import std_hash @@ -162,17 +162,17 @@ def get_selection_chance(self, now: Optional[int] = None) -> float: class AddressManager: id_count: int key: int - random_pos: List[int] - tried_matrix: List[List[int]] - new_matrix: List[List[int]] + random_pos: list[int] + tried_matrix: list[list[int]] + new_matrix: list[list[int]] tried_count: int new_count: int - map_addr: Dict[str, int] - map_info: Dict[int, ExtendedPeerInfo] + map_addr: dict[str, int] + map_info: dict[int, ExtendedPeerInfo] last_good: int - tried_collisions: List[int] - used_new_matrix_positions: Set[Tuple[int, int]] - used_tried_matrix_positions: Set[Tuple[int, int]] + tried_collisions: list[int] + used_new_matrix_positions: set[tuple[int, int]] + used_tried_matrix_positions: set[tuple[int, int]] allow_private_subnets: bool def __init__(self) -> None: @@ -230,7 +230,7 @@ def load_used_table_positions(self) -> None: if self.tried_matrix[bucket][pos] != -1: 
self.used_tried_matrix_positions.add((bucket, pos)) - def create_(self, addr: TimestampedPeerInfo, addr_src: Optional[PeerInfo]) -> Tuple[ExtendedPeerInfo, int]: + def create_(self, addr: TimestampedPeerInfo, addr_src: Optional[PeerInfo]) -> tuple[ExtendedPeerInfo, int]: self.id_count += 1 node_id = self.id_count self.map_info[node_id] = ExtendedPeerInfo(addr, addr_src) @@ -239,7 +239,7 @@ def create_(self, addr: TimestampedPeerInfo, addr_src: Optional[PeerInfo]) -> Tu self.random_pos.append(node_id) return (self.map_info[node_id], node_id) - def find_(self, addr: PeerInfo) -> Tuple[Optional[ExtendedPeerInfo], Optional[int]]: + def find_(self, addr: PeerInfo) -> tuple[Optional[ExtendedPeerInfo], Optional[int]]: if addr.host not in self.map_addr: return (None, None) node_id = self.map_addr[addr.host] @@ -445,7 +445,7 @@ def select_peer_(self, new_only: bool) -> Optional[ExtendedPeerInfo]: if not new_only and self.tried_count > 0 and (self.new_count == 0 or randrange(2) == 0): chance = 1.0 start = time.time() - cached_tried_matrix_positions: List[Tuple[int, int]] = [] + cached_tried_matrix_positions: list[tuple[int, int]] = [] if len(self.used_tried_matrix_positions) < math.sqrt(TRIED_BUCKET_COUNT * BUCKET_SIZE): cached_tried_matrix_positions = list(self.used_tried_matrix_positions) while True: @@ -475,7 +475,7 @@ def select_peer_(self, new_only: bool) -> Optional[ExtendedPeerInfo]: else: chance = 1.0 start = time.time() - cached_new_matrix_positions: List[Tuple[int, int]] = [] + cached_new_matrix_positions: list[tuple[int, int]] = [] if len(self.used_new_matrix_positions) < math.sqrt(NEW_BUCKET_COUNT * BUCKET_SIZE): cached_new_matrix_positions = list(self.used_new_matrix_positions) while True: @@ -542,8 +542,8 @@ def select_tried_collision_(self) -> Optional[ExtendedPeerInfo]: old_id = self.tried_matrix[tried_bucket][tried_bucket_pos] return self.map_info[old_id] - def get_peers_(self) -> List[TimestampedPeerInfo]: - addr: List[TimestampedPeerInfo] = [] + def get_peers_(self) -> list[TimestampedPeerInfo]: + addr: list[TimestampedPeerInfo] = [] num_nodes = min(1000, math.ceil(23 * len(self.random_pos) / 100)) for n in range(len(self.random_pos)): if len(addr) >= num_nodes: @@ -596,7 +596,7 @@ async def size(self) -> int: async def add_to_new_table( self, - addresses: List[TimestampedPeerInfo], + addresses: list[TimestampedPeerInfo], source: Optional[PeerInfo] = None, penalty: int = 0, ) -> bool: @@ -647,7 +647,7 @@ async def select_peer(self, new_only: bool = False) -> Optional[ExtendedPeerInfo return self.select_peer_(new_only) # Return a bunch of addresses, selected at random. - async def get_peers(self) -> List[TimestampedPeerInfo]: + async def get_peers(self) -> list[TimestampedPeerInfo]: async with self.lock: return self.get_peers_() diff --git a/chia/server/address_manager_store.py b/chia/server/address_manager_store.py index be840680d830..51610974f8d3 100644 --- a/chia/server/address_manager_store.py +++ b/chia/server/address_manager_store.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from pathlib import Path from timeit import default_timer as timer -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional import aiofiles @@ -30,19 +30,19 @@ class PeerDataSerialization(Streamable): Serializable property bag for the peer data that was previously stored in sqlite. 
""" - metadata: List[Tuple[str, str]] - nodes: List[Tuple[uint64, str]] - new_table: List[Tuple[uint64, uint64]] + metadata: list[tuple[str, str]] + nodes: list[tuple[uint64, str]] + new_table: list[tuple[uint64, uint64]] async def makePeerDataSerialization( - metadata: List[Tuple[str, Any]], nodes: List[Tuple[int, ExtendedPeerInfo]], new_table: List[Tuple[int, int]] + metadata: list[tuple[str, Any]], nodes: list[tuple[int, ExtendedPeerInfo]], new_table: list[tuple[int, int]] ) -> bytes: """ Create a PeerDataSerialization, adapting the provided collections """ - transformed_nodes: List[Tuple[uint64, str]] = [] - transformed_new_table: List[Tuple[uint64, uint64]] = [] + transformed_nodes: list[tuple[uint64, str]] = [] + transformed_new_table: list[tuple[uint64, uint64]] = [] for index, [node_id, peer_info] in enumerate(nodes): transformed_nodes.append((uint64(node_id), peer_info.to_string())) @@ -101,10 +101,10 @@ async def serialize(cls, address_manager: AddressManager, peers_file_path: Path) """ Serialize the address manager's peer data to a file. """ - metadata: List[Tuple[str, str]] = [] - nodes: List[Tuple[int, ExtendedPeerInfo]] = [] - new_table_entries: List[Tuple[int, int]] = [] - unique_ids: Dict[int, int] = {} + metadata: list[tuple[str, str]] = [] + nodes: list[tuple[int, ExtendedPeerInfo]] = [] + new_table_entries: list[tuple[int, int]] = [] + unique_ids: dict[int, int] = {} count_ids: int = 0 log.info("Serializing peer data") @@ -157,11 +157,11 @@ async def _deserialize(cls, peers_file_path: Path) -> AddressManager: log.exception(f"Unable to deserialize peers from {peers_file_path}") if peer_data is not None: - metadata: Dict[str, str] = {key: value for key, value in peer_data.metadata} - nodes: List[Tuple[int, ExtendedPeerInfo]] = [ + metadata: dict[str, str] = {key: value for key, value in peer_data.metadata} + nodes: list[tuple[int, ExtendedPeerInfo]] = [ (node_id, ExtendedPeerInfo.from_string(info_str)) for node_id, info_str in peer_data.nodes ] - new_table_entries: List[Tuple[int, int]] = [(node_id, bucket) for node_id, bucket in peer_data.new_table] + new_table_entries: list[tuple[int, int]] = [(node_id, bucket) for node_id, bucket in peer_data.new_table] log.debug(f"Deserializing peer data took {timer() - start_time} seconds") address_manager.key = int(metadata["key"]) @@ -226,9 +226,9 @@ async def _read_peers(cls, peers_file_path: Path) -> PeerDataSerialization: async def _write_peers( cls, peers_file_path: Path, - metadata: List[Tuple[str, Any]], - nodes: List[Tuple[int, ExtendedPeerInfo]], - new_table: List[Tuple[int, int]], + metadata: list[tuple[str, Any]], + nodes: list[tuple[int, ExtendedPeerInfo]], + new_table: list[tuple[int, int]], ) -> None: """ Serializes the given peer data and writes it to the peers file. diff --git a/chia/server/capabilities.py b/chia/server/capabilities.py index cdb9acbb80d7..0b03ddbda40d 100644 --- a/chia/server/capabilities.py +++ b/chia/server/capabilities.py @@ -1,16 +1,16 @@ from __future__ import annotations -from typing import Iterable, List, Set, Tuple +from collections.abc import Iterable from chia.protocols.shared_protocol import Capability from chia.util.ints import uint16 -def known_active_capabilities(values: Iterable[Tuple[uint16, str]]) -> List[Capability]: +def known_active_capabilities(values: Iterable[tuple[uint16, str]]) -> list[Capability]: # NOTE: order is not guaranteed # TODO: what if there's a claim for both supporting and not? 
# presently it considers it supported - filtered: Set[Capability] = set() + filtered: set[Capability] = set() for value, state in values: if state != "1": continue diff --git a/chia/server/chia_policy.py b/chia/server/chia_policy.py index eab2f7982416..25a1a8dfaa14 100644 --- a/chia/server/chia_policy.py +++ b/chia/server/chia_policy.py @@ -11,7 +11,8 @@ import _overlapped import _winapi -from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional, Tuple, Union +from collections.abc import Iterable +from typing import TYPE_CHECKING, Any, Callable, Optional, Union from typing_extensions import Protocol, TypeAlias @@ -83,7 +84,7 @@ def _register( self, ov: _overlapped.Overlapped, obj: socket.socket, - callback: Callable[[object, socket.socket, _overlapped.Overlapped], Tuple[socket.socket, object]], + callback: Callable[[object, socket.socket, _overlapped.Overlapped], tuple[socket.socket, object]], ) -> _OverlappedFuture: ... def _get_accept_socket(self, family: socket.AddressFamily) -> socket.socket: ... @@ -251,7 +252,7 @@ def enable_connections(self) -> None: def disable_connections(self) -> None: self.allow_connections = False - async def _chia_accept_loop(self, listener: socket.socket) -> Tuple[socket.socket, Tuple[object, ...]]: + async def _chia_accept_loop(self, listener: socket.socket) -> tuple[socket.socket, tuple[object, ...]]: while True: # TODO: switch to Event code. while not self.allow_connections: @@ -266,7 +267,7 @@ async def _chia_accept_loop(self, listener: socket.socket) -> Tuple[socket.socke ): raise - def _chia_accept(self, listener: socket.socket) -> asyncio.Future[Tuple[socket.socket, Tuple[object, ...]]]: + def _chia_accept(self, listener: socket.socket) -> asyncio.Future[tuple[socket.socket, tuple[object, ...]]]: self._register_with_iocp(listener) conn = self._get_accept_socket(listener.family) # pylint: disable=assignment-from-no-return ov = _overlapped.Overlapped(_winapi.NULL) @@ -274,7 +275,7 @@ def _chia_accept(self, listener: socket.socket) -> asyncio.Future[Tuple[socket.s def finish_accept( trans: object, key: socket.socket, ov: _overlapped.Overlapped - ) -> Tuple[socket.socket, object]: + ) -> tuple[socket.socket, object]: ov.getresult() # Use SO_UPDATE_ACCEPT_CONTEXT so getsockname() etc work. 
buf = struct.pack("@P", listener.fileno()) @@ -302,7 +303,7 @@ async def accept_coro(self: ChiaProactor, future: asyncio.Future[object], conn: asyncio.ensure_future(coro, loop=self._loop) return future - def accept(self, listener: socket.socket) -> asyncio.Future[Tuple[socket.socket, Tuple[object, ...]]]: + def accept(self, listener: socket.socket) -> asyncio.Future[tuple[socket.socket, tuple[object, ...]]]: coro = self._chia_accept_loop(listener) return asyncio.ensure_future(coro) diff --git a/chia/server/introducer_peers.py b/chia/server/introducer_peers.py index 69d4a3f8eea8..37ac9a5ce8b4 100644 --- a/chia/server/introducer_peers.py +++ b/chia/server/introducer_peers.py @@ -3,7 +3,7 @@ import random import time from dataclasses import dataclass -from typing import List, Optional, Set +from typing import Optional from chia.types.peer_info import PeerInfo from chia.util.ints import uint16, uint64 @@ -40,7 +40,7 @@ class IntroducerPeers: """ def __init__(self) -> None: - self._peers: Set[VettedPeer] = set() + self._peers: set[VettedPeer] = set() def add(self, peer: Optional[PeerInfo]) -> bool: if peer is None or not peer.port: @@ -66,7 +66,7 @@ def remove(self, peer: Optional[VettedPeer]) -> bool: def get_peers( self, max_peers: int = 0, randomize: bool = False, recent_threshold: float = 9999999 - ) -> List[VettedPeer]: + ) -> list[VettedPeer]: target_peers = [peer for peer in self._peers if time.time() - float(peer.time_added) < recent_threshold] if not max_peers or max_peers > len(target_peers): max_peers = len(target_peers) diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py index 85213da1af51..e6269e348894 100644 --- a/chia/server/node_discovery.py +++ b/chia/server/node_discovery.py @@ -9,7 +9,7 @@ from pathlib import Path from random import Random from secrets import randbits -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional import dns.asyncresolver @@ -46,8 +46,8 @@ def __init__( server: ChiaServer, target_outbound_count: int, peers_file_path: Path, - introducer_info: Optional[Dict[str, Any]], - dns_servers: List[str], + introducer_info: Optional[dict[str, Any]], + dns_servers: list[str], peer_connect_interval: int, selected_network: str, default_port: Optional[int], @@ -68,10 +68,10 @@ def __init__( self.enable_private_networks = False self.peer_connect_interval = peer_connect_interval self.log = log - self.relay_queue: Optional[asyncio.Queue[Tuple[TimestampedPeerInfo, int]]] = None + self.relay_queue: Optional[asyncio.Queue[tuple[TimestampedPeerInfo, int]]] = None self.address_manager: Optional[AddressManager] = None - self.connection_time_pretest: Dict[str, Any] = {} - self.received_count_from_peers: Dict[str, Any] = {} + self.connection_time_pretest: dict[str, Any] = {} + self.received_count_from_peers: dict[str, Any] = {} self.lock = asyncio.Lock() self.connect_peers_task: Optional[asyncio.Task[None]] = None self.serialize_task: Optional[asyncio.Task[None]] = None @@ -82,8 +82,8 @@ def __init__( except Exception: self.resolver = None self.log.exception("Error initializing asyncresolver") - self.pending_outbound_connections: Set[str] = set() - self.pending_tasks: Set[asyncio.Task[None]] = set() + self.pending_outbound_connections: set[str] = set() + self.pending_tasks: set[asyncio.Task[None]] = set() self.default_port: Optional[int] = default_port if default_port is None and selected_network in NETWORK_ID_DEFAULT_PORTS: self.default_port = NETWORK_ID_DEFAULT_PORTS[selected_network] @@ -201,7 +201,7 @@ async def 
_query_dns(self, dns_address: str) -> None: self.log.warning("Skipping DNS query: asyncresolver not initialized.") return for rdtype in ["A", "AAAA"]: - peers: List[TimestampedPeerInfo] = [] + peers: list[TimestampedPeerInfo] = [] result = await self.resolver.resolve(qname=dns_address, rdtype=rdtype, lifetime=30) for ip in result: peers.append( @@ -448,7 +448,7 @@ def _peer_has_wrong_network_port(self, port: uint16) -> bool: return port in NETWORK_ID_DEFAULT_PORTS.values() and port != self.default_port async def _add_peers_common( - self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool + self, peer_list: list[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool ) -> None: # Check if we got the peers from a full node or from the introducer. peers_adjusted_timestamp = [] @@ -503,8 +503,8 @@ def __init__( server: ChiaServer, target_outbound_count: int, peers_file_path: Path, - introducer_info: Dict[str, Any], - dns_servers: List[str], + introducer_info: dict[str, Any], + dns_servers: list[str], peer_connect_interval: int, selected_network: str, default_port: Optional[int], @@ -522,7 +522,7 @@ def __init__( log, ) self.relay_queue = asyncio.Queue() - self.neighbour_known_peers: Dict[PeerInfo, Set[str]] = {} + self.neighbour_known_peers: dict[PeerInfo, set[str]] = {} self.key = randbits(256) async def start(self) -> None: @@ -571,7 +571,7 @@ async def _periodically_self_advertise_and_clean_data(self) -> None: self.log.error(f"Exception in self advertise: {e}") self.log.error(f"Traceback: {traceback.format_exc()}") - async def add_peers_neighbour(self, peers: List[TimestampedPeerInfo], neighbour_info: PeerInfo) -> None: + async def add_peers_neighbour(self, peers: list[TimestampedPeerInfo], neighbour_info: PeerInfo) -> None: async with self.lock: for peer in peers: if neighbour_info not in self.neighbour_known_peers: @@ -604,7 +604,7 @@ async def request_peers(self, peer_info: PeerInfo) -> Optional[Message]: return None async def add_peers( - self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool + self, peer_list: list[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool ) -> None: try: await self._add_peers_common(peer_list, peer_src, is_full_node) @@ -683,8 +683,8 @@ def __init__( server: ChiaServer, target_outbound_count: int, peers_file_path: Path, - introducer_info: Dict[str, Any], - dns_servers: List[str], + introducer_info: dict[str, Any], + dns_servers: list[str], peer_connect_interval: int, selected_network: str, default_port: Optional[int], @@ -713,6 +713,6 @@ async def ensure_is_closed(self) -> None: await self._close_common() async def add_peers( - self, peer_list: List[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool + self, peer_list: list[TimestampedPeerInfo], peer_src: Optional[PeerInfo], is_full_node: bool ) -> None: await self._add_peers_common(peer_list, peer_src, is_full_node) diff --git a/chia/server/rate_limit_numbers.py b/chia/server/rate_limit_numbers.py index 1bef1d0abff9..521cf73ac134 100644 --- a/chia/server/rate_limit_numbers.py +++ b/chia/server/rate_limit_numbers.py @@ -4,12 +4,12 @@ import copy import dataclasses import functools -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability -compose_rate_limits_cache: Dict[int, Dict[str, Any]] = {} +compose_rate_limits_cache: dict[int, 
dict[str, Any]] = {} @dataclasses.dataclass(frozen=True) @@ -19,7 +19,7 @@ class RLSettings: max_total_size: Optional[int] = None # Max cumulative size of all requests in that period -def get_rate_limits_to_use(our_capabilities: List[Capability], peer_capabilities: List[Capability]) -> Dict[str, Any]: +def get_rate_limits_to_use(our_capabilities: list[Capability], peer_capabilities: list[Capability]) -> dict[str, Any]: # This will use the newest possible rate limits that both peers support. At this time there are only two # options, v1 and v2. @@ -35,14 +35,14 @@ def get_rate_limits_to_use(our_capabilities: List[Capability], peer_capabilities return rate_limits[1] -def compose_rate_limits(old_rate_limits: Dict[str, Any], new_rate_limits: Dict[str, Any]) -> Dict[str, Any]: +def compose_rate_limits(old_rate_limits: dict[str, Any], new_rate_limits: dict[str, Any]) -> dict[str, Any]: # Composes two rate limits dicts, so that the newer values override the older values - final_rate_limits: Dict[str, Any] = copy.deepcopy(new_rate_limits) - categories: List[str] = ["rate_limits_tx", "rate_limits_other"] - all_new_msgs_lists: List[List[ProtocolMessageTypes]] = [ + final_rate_limits: dict[str, Any] = copy.deepcopy(new_rate_limits) + categories: list[str] = ["rate_limits_tx", "rate_limits_other"] + all_new_msgs_lists: list[list[ProtocolMessageTypes]] = [ list(new_rate_limits[category].keys()) for category in categories ] - all_new_msgs: List[ProtocolMessageTypes] = functools.reduce(lambda a, b: a + b, all_new_msgs_lists) + all_new_msgs: list[ProtocolMessageTypes] = functools.reduce(lambda a, b: a + b, all_new_msgs_lists) for old_cat, mapping in old_rate_limits.items(): if old_cat in categories: for old_protocol_msg, old_rate_limit_value in mapping.items(): diff --git a/chia/server/rate_limits.py b/chia/server/rate_limits.py index 1602e760514c..948ab79f8025 100644 --- a/chia/server/rate_limits.py +++ b/chia/server/rate_limits.py @@ -4,7 +4,6 @@ import logging import time from collections import Counter -from typing import List from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.shared_protocol import Capability @@ -43,7 +42,7 @@ def __init__(self, incoming: bool, reset_seconds: int = 60, percentage_of_limit: self.non_tx_cumulative_size = 0 def process_msg_and_check( - self, message: Message, our_capabilities: List[Capability], peer_capabilities: List[Capability] + self, message: Message, our_capabilities: list[Capability], peer_capabilities: list[Capability] ) -> bool: """ Returns True if message can be processed successfully, false if a rate limit is passed. 
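The hunks above all apply one mechanical pattern, and the rest of this patch repeats it: PEP 585 builtin generics (list, dict, set, tuple, type) replace their typing equivalents, and ABCs such as Iterable, Awaitable, and Sequence now come from collections.abc instead of typing. A minimal sketch of the target style (illustrative only, not code from this repository, and assuming from __future__ import annotations is in effect, as it is in the files touched here):

    from __future__ import annotations

    from collections.abc import Iterable  # previously: from typing import Iterable
    from typing import Optional  # Optional, Any, Callable, and Union still come from typing

    def tally(pairs: Iterable[tuple[str, int]], limit: Optional[int] = None) -> dict[str, int]:
        # tuple[...] and dict[...] replace typing.Tuple and typing.Dict
        totals: dict[str, int] = {}
        for key, count in pairs:
            totals[key] = totals.get(key, 0) + count
            if limit is not None and len(totals) >= limit:
                break
        return totals

Names like Optional and Any that have no builtin or collections.abc counterpart keep their typing imports, which is why the import lines in these hunks shrink rather than disappear.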
diff --git a/chia/server/server.py b/chia/server/server.py index 04e2948985e3..a0fa73927d6d 100644 --- a/chia/server/server.py +++ b/chia/server/server.py @@ -5,10 +5,11 @@ import ssl import time import traceback +from collections.abc import Awaitable from dataclasses import dataclass, field from ipaddress import IPv4Network, IPv6Network, ip_network from pathlib import Path -from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple, Union, cast +from typing import Any, Callable, Optional, Union, cast from aiohttp import ( ClientResponseError, @@ -116,7 +117,7 @@ def calculate_node_id(cert_path: Path) -> bytes32: class ChiaServer: _port: Optional[int] _local_type: NodeType - _local_capabilities_for_handshake: List[Tuple[uint16, str]] + _local_capabilities_for_handshake: list[tuple[uint16, str]] _ping_interval: int _network_id: str _inbound_rate_limit_percent: int @@ -124,13 +125,13 @@ class ChiaServer: api: ApiProtocol node: Any root_path: Path - config: Dict[str, Any] + config: dict[str, Any] log: logging.Logger ssl_context: ssl.SSLContext ssl_client_context: ssl.SSLContext node_id: bytes32 - exempt_peer_networks: List[Union[IPv4Network, IPv6Network]] - all_connections: Dict[bytes32, WSChiaConnection] = field(default_factory=dict) + exempt_peer_networks: list[Union[IPv4Network, IPv6Network]] + all_connections: dict[bytes32, WSChiaConnection] = field(default_factory=dict) on_connect: Optional[ConnectionCallback] = None shut_down_event: asyncio.Event = field(default_factory=asyncio.Event) introducer_peers: Optional[IntroducerPeers] = None @@ -138,7 +139,7 @@ class ChiaServer: webserver: Optional[WebServer] = None connection_close_task: Optional[asyncio.Task[None]] = None received_message_callback: Optional[ConnectionCallback] = None - banned_peers: Dict[str, float] = field(default_factory=dict) + banned_peers: dict[str, float] = field(default_factory=dict) invalid_protocol_ban_seconds = INVALID_PROTOCOL_BAN_SECONDS @classmethod @@ -152,11 +153,11 @@ def create( network_id: str, inbound_rate_limit_percent: int, outbound_rate_limit_percent: int, - capabilities: List[Tuple[uint16, str]], + capabilities: list[tuple[uint16, str]], root_path: Path, - config: Dict[str, Any], - private_ca_crt_key: Tuple[Path, Path], - chia_ca_crt_key: Tuple[Path, Path], + config: dict[str, Any], + private_ca_crt_key: tuple[Path, Path], + chia_ca_crt_key: tuple[Path, Path], name: str = __name__, ) -> ChiaServer: log = logging.getLogger(name) @@ -245,7 +246,7 @@ async def garbage_collect_connections_task(self) -> None: is_crawler = getattr(self.node, "crawl", None) while True: await asyncio.sleep(600 if is_crawler is None else 2) - to_remove: List[WSChiaConnection] = [] + to_remove: list[WSChiaConnection] = [] for connection in self.all_connections.values(): if connection.closed: to_remove.append(connection) @@ -558,7 +559,7 @@ async def connection_closed( if on_disconnect is not None: await on_disconnect(connection) - async def validate_broadcast_message_type(self, messages: List[Message], node_type: NodeType) -> None: + async def validate_broadcast_message_type(self, messages: list[Message], node_type: NodeType) -> None: for message in messages: if message_requires_reply(ProtocolMessageTypes(message.type)): # Internal protocol logic error - we will raise, blocking messages to all peers @@ -574,7 +575,7 @@ async def validate_broadcast_message_type(self, messages: List[Message], node_ty async def send_to_all( self, - messages: List[Message], + messages: list[Message], node_type: NodeType, exclude: 
Optional[bytes32] = None, ) -> None: @@ -586,7 +587,7 @@ async def send_to_all( async def send_to_all_if( self, - messages: List[Message], + messages: list[Message], node_type: NodeType, predicate: Callable[[WSChiaConnection], bool], exclude: Optional[bytes32] = None, @@ -597,7 +598,7 @@ async def send_to_all_if( for message in messages: await connection.send_message(message) - async def send_to_specific(self, messages: List[Message], node_id: bytes32) -> None: + async def send_to_specific(self, messages: list[Message], node_id: bytes32) -> None: if node_id in self.all_connections: connection = self.all_connections[node_id] for message in messages: @@ -614,7 +615,7 @@ async def call_api_of_specific( def get_connections( self, node_type: Optional[NodeType] = None, *, outbound: Optional[bool] = None - ) -> List[WSChiaConnection]: + ) -> list[WSChiaConnection]: result = [] for _, connection in self.all_connections.items(): node_type_match = node_type is None or connection.connection_type == node_type @@ -707,7 +708,7 @@ def accept_inbound_connections(self, node_type: NodeType) -> bool: return inbound_count < cast(int, self.config.get("max_inbound_timelord", 5)) return True - def is_trusted_peer(self, peer: WSChiaConnection, trusted_peers: Dict[str, Any]) -> bool: + def is_trusted_peer(self, peer: WSChiaConnection, trusted_peers: dict[str, Any]) -> bool: return is_trusted_peer( host=peer.peer_info.host, node_id=peer.peer_node_id, @@ -716,5 +717,5 @@ def is_trusted_peer(self, peer: WSChiaConnection, trusted_peers: Dict[str, Any]) testing=self.config.get("testing", False), ) - def set_capabilities(self, capabilities: List[Tuple[uint16, str]]) -> None: + def set_capabilities(self, capabilities: list[tuple[uint16, str]]) -> None: self._local_capabilities_for_handshake = capabilities diff --git a/chia/server/signal_handlers.py b/chia/server/signal_handlers.py index 0ee6253af347..26630f619679 100644 --- a/chia/server/signal_handlers.py +++ b/chia/server/signal_handlers.py @@ -6,8 +6,9 @@ import functools import signal import sys +from collections.abc import AsyncIterator from types import FrameType -from typing import AsyncIterator, List, Optional, final +from typing import Optional, final from typing_extensions import Protocol @@ -33,7 +34,7 @@ async def __call__( @final @dataclasses.dataclass class SignalHandlers: - tasks: List[asyncio.Task[None]] = dataclasses.field(default_factory=list) + tasks: list[asyncio.Task[None]] = dataclasses.field(default_factory=list) @classmethod @contextlib.asynccontextmanager diff --git a/chia/server/ssl_context.py b/chia/server/ssl_context.py index 2dff0c01ac83..a7211c6a45b3 100644 --- a/chia/server/ssl_context.py +++ b/chia/server/ssl_context.py @@ -1,31 +1,31 @@ from __future__ import annotations from pathlib import Path -from typing import Any, Dict, Tuple +from typing import Any -def public_ssl_paths(path: Path, config: Dict[str, Any]) -> Tuple[Path, Path]: +def public_ssl_paths(path: Path, config: dict[str, Any]) -> tuple[Path, Path]: return ( path / config["ssl"]["public_crt"], path / config["ssl"]["public_key"], ) -def private_ssl_paths(path: Path, config: Dict[str, Any]) -> Tuple[Path, Path]: +def private_ssl_paths(path: Path, config: dict[str, Any]) -> tuple[Path, Path]: return ( path / config["ssl"]["private_crt"], path / config["ssl"]["private_key"], ) -def private_ssl_ca_paths(path: Path, config: Dict[str, Any]) -> Tuple[Path, Path]: +def private_ssl_ca_paths(path: Path, config: dict[str, Any]) -> tuple[Path, Path]: return ( path / 
config["private_ssl_ca"]["crt"], path / config["private_ssl_ca"]["key"], ) -def chia_ssl_ca_paths(path: Path, config: Dict[str, Any]) -> Tuple[Path, Path]: +def chia_ssl_ca_paths(path: Path, config: dict[str, Any]) -> tuple[Path, Path]: return ( path / config["chia_ssl_ca"]["crt"], path / config["chia_ssl_ca"]["key"], diff --git a/chia/server/start_data_layer.py b/chia/server/start_data_layer.py index cad0637b11ef..db0bfabfd085 100644 --- a/chia/server/start_data_layer.py +++ b/chia/server/start_data_layer.py @@ -3,7 +3,7 @@ import logging import pathlib import sys -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast from chia.data_layer.data_layer import DataLayer from chia.data_layer.data_layer_api import DataLayerAPI @@ -31,9 +31,9 @@ # TODO: Review need for config and if retained then hint it properly. def create_data_layer_service( root_path: pathlib.Path, - config: Dict[str, Any], - downloaders: List[PluginRemote], - uploaders: List[PluginRemote], # dont add FilesystemUploader to this, it is the default uploader + config: dict[str, Any], + downloaders: list[PluginRemote], + uploaders: list[PluginRemote], # dont add FilesystemUploader to this, it is the default uploader wallet_service: Optional[WalletService] = None, connect_to_daemon: bool = True, ) -> DataLayerService: @@ -106,7 +106,7 @@ async def async_main() -> int: old_uploaders = config["data_layer"].get("uploaders", []) new_uploaders = plugins_config.get("uploaders", []) conf_file_uploaders = await load_plugin_configurations(service_dir, "uploaders", log) - uploaders: List[PluginRemote] = [ + uploaders: list[PluginRemote] = [ *(PluginRemote(url=url) for url in old_uploaders), *(PluginRemote.unmarshal(marshalled=marshalled) for marshalled in new_uploaders), *conf_file_uploaders, @@ -115,7 +115,7 @@ async def async_main() -> int: old_downloaders = config["data_layer"].get("downloaders", []) new_downloaders = plugins_config.get("downloaders", []) conf_file_uploaders = await load_plugin_configurations(service_dir, "downloaders", log) - downloaders: List[PluginRemote] = [ + downloaders: list[PluginRemote] = [ *(PluginRemote(url=url) for url in old_downloaders), *(PluginRemote.unmarshal(marshalled=marshalled) for marshalled in new_downloaders), *conf_file_uploaders, diff --git a/chia/server/start_farmer.py b/chia/server/start_farmer.py index 780d5f80d222..0c9e0b82b75e 100644 --- a/chia/server/start_farmer.py +++ b/chia/server/start_farmer.py @@ -2,7 +2,7 @@ import pathlib import sys -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS, update_testnet_overrides @@ -26,8 +26,8 @@ def create_farmer_service( root_path: pathlib.Path, - config: Dict[str, Any], - config_pool: Dict[str, Any], + config: dict[str, Any], + config_pool: dict[str, Any], consensus_constants: ConsensusConstants, keychain: Optional[Keychain] = None, connect_to_daemon: bool = True, diff --git a/chia/server/start_full_node.py b/chia/server/start_full_node.py index 974f63a4b8da..f439f5354f90 100644 --- a/chia/server/start_full_node.py +++ b/chia/server/start_full_node.py @@ -5,7 +5,7 @@ import pathlib import sys from multiprocessing import freeze_support -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import 
DEFAULT_CONSTANTS, update_testnet_overrides @@ -31,10 +31,10 @@ async def create_full_node_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], consensus_constants: ConsensusConstants, connect_to_daemon: bool = True, - override_capabilities: Optional[List[Tuple[uint16, str]]] = None, + override_capabilities: Optional[list[tuple[uint16, str]]] = None, ) -> FullNodeService: service_config = config[SERVICE_NAME] @@ -70,7 +70,7 @@ async def create_full_node_service( ) -async def async_main(service_config: Dict[str, Any]) -> int: +async def async_main(service_config: dict[str, Any]) -> int: # TODO: refactor to avoid the double load config = load_config(DEFAULT_ROOT_PATH, "config.yaml") config[SERVICE_NAME] = service_config diff --git a/chia/server/start_harvester.py b/chia/server/start_harvester.py index ed47b9c8dd29..896270edd3f4 100644 --- a/chia/server/start_harvester.py +++ b/chia/server/start_harvester.py @@ -2,7 +2,7 @@ import pathlib import sys -from typing import Any, Dict, Optional, Set +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -26,9 +26,9 @@ def create_harvester_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], consensus_constants: ConsensusConstants, - farmer_peers: Set[UnresolvedPeerInfo], + farmer_peers: set[UnresolvedPeerInfo], connect_to_daemon: bool = True, ) -> HarvesterService: service_config = config[SERVICE_NAME] diff --git a/chia/server/start_introducer.py b/chia/server/start_introducer.py index f9bba0eae58e..134c646dc9cb 100644 --- a/chia/server/start_introducer.py +++ b/chia/server/start_introducer.py @@ -2,7 +2,7 @@ import pathlib import sys -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.introducer.introducer import Introducer from chia.introducer.introducer_api import IntroducerAPI @@ -22,7 +22,7 @@ def create_introducer_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], advertised_port: Optional[int] = None, connect_to_daemon: bool = True, ) -> IntroducerService: diff --git a/chia/server/start_service.py b/chia/server/start_service.py index 2accd724a38b..d140614d0412 100644 --- a/chia/server/start_service.py +++ b/chia/server/start_service.py @@ -6,24 +6,10 @@ import logging.config import os import signal +from collections.abc import AsyncIterator, Awaitable, Coroutine from pathlib import Path from types import FrameType -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Coroutine, - Dict, - Generic, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, - cast, -) +from typing import Any, Callable, Generic, Optional, TypeVar, cast from chia.daemon.server import service_launch_lock_path from chia.rpc.rpc_server import RpcApiProtocol, RpcServer, RpcServiceProtocol, start_rpc_server @@ -54,7 +40,7 @@ _T_ApiProtocol = TypeVar("_T_ApiProtocol", bound=ApiProtocol) _T_RpcApiProtocol = TypeVar("_T_RpcApiProtocol", bound=RpcApiProtocol) -RpcInfo = Tuple[Type[_T_RpcApiProtocol], int] +RpcInfo = tuple[type[_T_RpcApiProtocol], int] log = logging.getLogger(__name__) @@ -74,14 +60,14 @@ def __init__( service_name: str, network_id: str, *, - config: Dict[str, Any], - upnp_ports: Optional[List[int]] = None, - connect_peers: Optional[Set[UnresolvedPeerInfo]] = None, + config: dict[str, Any], + upnp_ports: Optional[list[int]] = None, + connect_peers: Optional[set[UnresolvedPeerInfo]] = None, 
on_connect_callback: Optional[Callable[[WSChiaConnection], Awaitable[None]]] = None, rpc_info: Optional[RpcInfo[_T_RpcApiProtocol]] = None, connect_to_daemon: bool = True, max_request_body_size: Optional[int] = None, - override_capabilities: Optional[List[Tuple[uint16, str]]] = None, + override_capabilities: Optional[list[tuple[uint16, str]]] = None, ) -> None: if upnp_ports is None: upnp_ports = [] @@ -117,7 +103,7 @@ def __init__( if node_type == NodeType.WALLET: inbound_rlp = self.service_config.get("inbound_rate_limit_percent", inbound_rlp) outbound_rlp = 60 - capabilities_to_use: List[Tuple[uint16, str]] = default_capabilities[node_type] + capabilities_to_use: list[tuple[uint16, str]] = default_capabilities[node_type] if override_capabilities is not None: capabilities_to_use = override_capabilities @@ -158,7 +144,7 @@ def __init__( self.stop_requested = asyncio.Event() async def _connect_peers_task_handler(self) -> None: - resolved_peers: Dict[UnresolvedPeerInfo, PeerInfo] = {} + resolved_peers: dict[UnresolvedPeerInfo, PeerInfo] = {} prefer_ipv6 = self.config.get("prefer_ipv6", False) while True: for unresolved in self._connect_peers: diff --git a/chia/server/start_timelord.py b/chia/server/start_timelord.py index 3f031666bee5..fe3efe603f3e 100644 --- a/chia/server/start_timelord.py +++ b/chia/server/start_timelord.py @@ -3,7 +3,7 @@ import logging import pathlib import sys -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -29,7 +29,7 @@ def create_timelord_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], constants: ConsensusConstants, connect_to_daemon: bool = True, ) -> TimelordService: diff --git a/chia/server/start_wallet.py b/chia/server/start_wallet.py index 5cacd2968412..7bc8196c059c 100644 --- a/chia/server/start_wallet.py +++ b/chia/server/start_wallet.py @@ -4,7 +4,7 @@ import pathlib import sys from multiprocessing import freeze_support -from typing import Any, Dict, Optional +from typing import Any, Optional from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -30,7 +30,7 @@ def create_wallet_service( root_path: pathlib.Path, - config: Dict[str, Any], + config: dict[str, Any], consensus_constants: ConsensusConstants, keychain: Optional[Keychain] = None, connect_to_daemon: bool = True, diff --git a/chia/server/upnp.py b/chia/server/upnp.py index 780dbd3aaf1b..b698c6465c67 100644 --- a/chia/server/upnp.py +++ b/chia/server/upnp.py @@ -3,9 +3,10 @@ import contextlib import logging import threading +from collections.abc import Iterator from dataclasses import dataclass, field from queue import Queue -from typing import Iterator, List, Optional, Tuple, Union +from typing import Optional, Union from typing_extensions import Literal @@ -24,13 +25,13 @@ @dataclass class UPnP: _thread: Optional[threading.Thread] = None - _queue: Queue[Union[Tuple[Literal["remap", "release"], int], Tuple[Literal["shutdown"]]]] = field( + _queue: Queue[Union[tuple[Literal["remap", "release"], int], tuple[Literal["shutdown"]]]] = field( default_factory=Queue, ) _upnp: Optional[miniupnpc.UPnP] = None @contextlib.contextmanager - def manage(self, ports: List[int]) -> Iterator[None]: + def manage(self, ports: list[int]) -> Iterator[None]: self.setup() try: for port in ports: diff --git 
a/chia/server/ws_connection.py b/chia/server/ws_connection.py index 7d1e3c3b5b78..40c634385481 100644 --- a/chia/server/ws_connection.py +++ b/chia/server/ws_connection.py @@ -5,8 +5,9 @@ import math import time import traceback +from collections.abc import Awaitable from dataclasses import dataclass, field -from typing import Any, Awaitable, Callable, Dict, List, Optional, Set, Tuple, Union +from typing import Any, Callable, Optional, Union from aiohttp import ClientSession, WebSocketError, WSCloseCode, WSMessage, WSMsgType from aiohttp.client import ClientWebSocketResponse @@ -47,7 +48,7 @@ error_response_version = Version("0.0.35") -def create_default_last_message_time_dict() -> Dict[ProtocolMessageTypes, float]: +def create_default_last_message_time_dict() -> dict[ProtocolMessageTypes, float]: return {message_type: -math.inf for message_type in ProtocolMessageTypes} @@ -73,8 +74,8 @@ class WSChiaConnection: api: ApiProtocol = field(repr=False) local_type: NodeType local_port: Optional[int] - local_capabilities_for_handshake: List[Tuple[uint16, str]] = field(repr=False) - local_capabilities: List[Capability] + local_capabilities_for_handshake: list[tuple[uint16, str]] = field(repr=False) + local_capabilities: list[Capability] peer_info: PeerInfo peer_node_id: bytes32 log: logging.Logger = field(repr=False) @@ -90,9 +91,9 @@ class WSChiaConnection: received_message_callback: Optional[ConnectionCallback] = field(repr=False) incoming_queue: asyncio.Queue[Message] = field(default_factory=asyncio.Queue, repr=False) outgoing_queue: asyncio.Queue[Message] = field(default_factory=asyncio.Queue, repr=False) - api_tasks: Dict[bytes32, asyncio.Task[None]] = field(default_factory=dict, repr=False) + api_tasks: dict[bytes32, asyncio.Task[None]] = field(default_factory=dict, repr=False) # Contains task ids of api tasks which should not be canceled - execute_tasks: Set[bytes32] = field(default_factory=set, repr=False) + execute_tasks: set[bytes32] = field(default_factory=set, repr=False) # ChiaConnection metrics creation_time: float = field(default_factory=time.time) @@ -107,17 +108,17 @@ class WSChiaConnection: _close_event: asyncio.Event = field(default_factory=asyncio.Event, repr=False) session: Optional[ClientSession] = field(default=None, repr=False) - pending_requests: Dict[uint16, asyncio.Event] = field(default_factory=dict, repr=False) - request_results: Dict[uint16, Message] = field(default_factory=dict, repr=False) + pending_requests: dict[uint16, asyncio.Event] = field(default_factory=dict, repr=False) + request_results: dict[uint16, Message] = field(default_factory=dict, repr=False) closed: bool = False connection_type: Optional[NodeType] = None request_nonce: uint16 = uint16(0) - peer_capabilities: List[Capability] = field(default_factory=list) + peer_capabilities: list[Capability] = field(default_factory=list) # Used by the Chia Seeder. 
version: str = field(default_factory=str) protocol_version: Version = field(default_factory=lambda: Version("0")) - log_rate_limit_last_time: Dict[ProtocolMessageTypes, float] = field( + log_rate_limit_last_time: dict[ProtocolMessageTypes, float] = field( default_factory=create_default_last_message_time_dict, repr=False, ) @@ -136,7 +137,7 @@ def create( peer_id: bytes32, inbound_rate_limit_percent: int, outbound_rate_limit_percent: int, - local_capabilities_for_handshake: List[Tuple[uint16, str]], + local_capabilities_for_handshake: list[tuple[uint16, str]], session: Optional[ClientSession] = None, ) -> WSChiaConnection: assert ws._writer is not None diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py index 29fff0af1f36..809f1419c35b 100644 --- a/chia/simulator/block_tools.py +++ b/chia/simulator/block_tools.py @@ -10,10 +10,11 @@ import sys import tempfile import time +from collections.abc import Sequence from dataclasses import dataclass, replace from pathlib import Path from random import Random -from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple +from typing import Any, Callable, Optional import anyio from chia_rs import ALLOW_BACKREFS, MEMPOOL_MODE, AugSchemeMPL, G1Element, G2Element, PrivateKey, solution_generator @@ -147,8 +148,8 @@ ) -def compute_additions_unchecked(sb: SpendBundle) -> List[Coin]: - ret: List[Coin] = [] +def compute_additions_unchecked(sb: SpendBundle) -> list[Coin]: + ret: list[Coin] = [] for cs in sb.coin_spends: parent_id = cs.coin.name() _, r = cs.puzzle_reveal.run_with_cost(INFINITE_COST, cs.solution) @@ -163,14 +164,14 @@ def compute_additions_unchecked(sb: SpendBundle) -> List[Coin]: return ret -def make_spend_bundle(coins: List[Coin], wallet: WalletTool, rng: Random) -> Tuple[SpendBundle, List[Coin]]: +def make_spend_bundle(coins: list[Coin], wallet: WalletTool, rng: Random) -> tuple[SpendBundle, list[Coin]]: """ makes a new spend bundle (block generator) spending some of the coins in the list of coins. The list will be updated to have spent coins removed and new coins appended. 
""" - new_coins: List[Coin] = [] - spend_bundles: List[SpendBundle] = [] + new_coins: list[Coin] = [] + spend_bundles: list[SpendBundle] = [] to_spend = rng.sample(coins, min(5, len(coins))) receiver = wallet.get_new_puzzlehash() for c in to_spend: @@ -190,16 +191,16 @@ class BlockTools: """ _block_cache_header: bytes32 - _block_cache_height_to_hash: Dict[uint32, bytes32] + _block_cache_height_to_hash: dict[uint32, bytes32] _block_cache_difficulty: uint64 - _block_cache: Dict[bytes32, BlockRecord] + _block_cache: dict[bytes32, BlockRecord] def __init__( self, constants: ConsensusConstants = test_constants, root_path: Optional[Path] = None, keychain: Optional[Keychain] = None, - config_overrides: Optional[Dict[str, Any]] = None, + config_overrides: Optional[dict[str, Any]] = None, automated_testing: bool = True, plot_dir: str = "test-plots", log: logging.Logger = logging.getLogger(__name__), @@ -214,7 +215,7 @@ def __init__( self.root_path = root_path self.log = log self.local_keychain = keychain - self.local_sk_cache: Dict[bytes32, Tuple[PrivateKey, Any]] = {} + self.local_sk_cache: dict[bytes32, tuple[PrivateKey, Any]] = {} self.automated_testing = automated_testing self.plot_dir_name = plot_dir @@ -270,7 +271,7 @@ def __init__( self.temp_dir: Path = get_plot_tmp_dir(self.plot_dir_name, self.automated_testing) self.plot_dir.mkdir(parents=True, exist_ok=True) self.temp_dir.mkdir(parents=True, exist_ok=True) - self.expected_plots: Dict[bytes32, Path] = {} + self.expected_plots: dict[bytes32, Path] = {} self.created_plots: int = 0 self.total_result = PlotRefreshResult() @@ -342,12 +343,12 @@ async def setup_keys(self, fingerprint: Optional[int] = None, reward_ph: Optiona self.farmer_ph = reward_ph self.pool_ph = reward_ph if self.automated_testing: - self.all_sks: List[PrivateKey] = [sk for sk, _ in await keychain_proxy.get_all_private_keys()] + self.all_sks: list[PrivateKey] = [sk for sk, _ in await keychain_proxy.get_all_private_keys()] else: self.all_sks = [self.farmer_master_sk] # we only want to include plots under the same fingerprint - self.pool_pubkeys: List[G1Element] = [master_sk_to_pool_sk(sk).get_g1() for sk in self.all_sks] + self.pool_pubkeys: list[G1Element] = [master_sk_to_pool_sk(sk).get_g1() for sk in self.all_sks] - self.farmer_pubkeys: List[G1Element] = [master_sk_to_farmer_sk(sk).get_g1() for sk in self.all_sks] + self.farmer_pubkeys: list[G1Element] = [master_sk_to_farmer_sk(sk).get_g1() for sk in self.all_sks] if len(self.pool_pubkeys) == 0 or len(self.farmer_pubkeys) == 0: raise RuntimeError("Keys not generated. 
Run `chia keys generate`") @@ -356,7 +357,7 @@ async def setup_keys(self, fingerprint: Optional[int] = None, reward_ph: Optiona if keychain_proxy is not None: await keychain_proxy.close() # close the keychain proxy - def change_config(self, new_config: Dict[str, Any]) -> None: + def change_config(self, new_config: dict[str, Any]) -> None: self._config = new_config overrides = self._config["network_overrides"]["constants"][self._config["selected_network"]] updated_constants = replace_str_to_bytes(self.constants, **overrides) @@ -505,7 +506,7 @@ async def delete_plot(self, plot_id: bytes32) -> None: await self.refresh_plots() @property - def config(self) -> Dict[str, Any]: + def config(self) -> dict[str, Any]: return copy.deepcopy(self._config) def get_daemon_ssl_context(self) -> ssl.SSLContext: @@ -567,7 +568,7 @@ def get_pool_wallet_tool(self) -> WalletTool: def get_consecutive_blocks( self, num_blocks: int, - block_list_input: Optional[List[FullBlock]] = None, + block_list_input: Optional[list[FullBlock]] = None, *, farmer_reward_puzzle_hash: Optional[bytes32] = None, pool_reward_puzzle_hash: Optional[bytes32] = None, @@ -583,21 +584,21 @@ def get_consecutive_blocks( normalized_to_identity_cc_sp: bool = False, normalized_to_identity_cc_ip: bool = False, current_time: bool = False, - block_refs: List[uint32] = [], + block_refs: list[uint32] = [], genesis_timestamp: Optional[uint64] = None, force_plot_id: Optional[bytes32] = None, dummy_block_references: bool = False, include_transactions: bool = False, skip_overflow: bool = False, min_signage_point: int = -1, - ) -> List[FullBlock]: + ) -> list[FullBlock]: assert num_blocks > 0 if block_list_input is not None: block_list = block_list_input.copy() else: block_list = [] - tx_block_heights: List[uint32] = [] + tx_block_heights: list[uint32] = [] if dummy_block_references: # block references can only point to transaction blocks, so we need # to record which ones are @@ -610,8 +611,8 @@ def get_consecutive_blocks( if time_per_block is None: time_per_block = float(constants.SUB_SLOT_TIME_TARGET) / float(constants.SLOT_BLOCKS_TARGET) - available_coins: List[Coin] = [] - pending_rewards: List[Coin] = [] + available_coins: list[Coin] = [] + pending_rewards: list[Coin] = [] wallet: Optional[WalletTool] = None rng: Optional[Random] = None if include_transactions: @@ -658,7 +659,7 @@ def get_consecutive_blocks( if num_blocks == 0: return block_list - blocks: Dict[bytes32, BlockRecord] + blocks: dict[bytes32, BlockRecord] if block_list[-1].header_hash == self._block_cache_header: height_to_hash = self._block_cache_height_to_hash difficulty = self._block_cache_difficulty @@ -681,8 +682,8 @@ def get_consecutive_blocks( curr = blocks[curr.prev_hash] blocks_added_this_sub_slot += 1 - finished_sub_slots_at_sp: List[EndOfSubSlotBundle] = [] # Sub-slots since last block, up to signage point - finished_sub_slots_at_ip: List[EndOfSubSlotBundle] = [] # Sub-slots since last block, up to infusion point + finished_sub_slots_at_sp: list[EndOfSubSlotBundle] = [] # Sub-slots since last block, up to signage point + finished_sub_slots_at_ip: list[EndOfSubSlotBundle] = [] # Sub-slots since last block, up to infusion point sub_slot_iters: uint64 = latest_block.sub_slot_iters # The number of iterations in one sub-slot same_slot_as_last = True # Only applies to first slot, to prevent old blocks from being added sub_slot_start_total_iters: uint128 = latest_block.ip_sub_slot_total_iters(constants) @@ -741,7 +742,7 @@ def get_consecutive_blocks( assert signage_point.cc_vdf is 
not None cc_sp_output_hash = signage_point.cc_vdf.output.get_hash() - qualified_proofs: List[Tuple[uint64, ProofOfSpace]] = self.get_pospaces_for_challenge( + qualified_proofs: list[tuple[uint64, ProofOfSpace]] = self.get_pospaces_for_challenge( constants, slot_cc_challenge, cc_sp_output_hash, @@ -1236,7 +1237,7 @@ def create_genesis_block( if timestamp is None: timestamp = uint64(int(time.time())) - finished_sub_slots: List[EndOfSubSlotBundle] = [] + finished_sub_slots: list[EndOfSubSlotBundle] = [] unfinished_block: Optional[UnfinishedBlock] = None ip_iters: uint64 = uint64(0) sub_slot_total_iters: uint128 = uint128(0) @@ -1262,7 +1263,7 @@ def create_genesis_block( cc_sp_output_hash = signage_point.cc_vdf.output.get_hash() # If did not reach the target slots to skip, don't make any proofs for this sub-slot # we're creating the genesis block, its height is always 0 - qualified_proofs: List[Tuple[uint64, ProofOfSpace]] = self.get_pospaces_for_challenge( + qualified_proofs: list[tuple[uint64, ProofOfSpace]] = self.get_pospaces_for_challenge( constants, cc_challenge, cc_sp_output_hash, @@ -1420,8 +1421,8 @@ def get_pospaces_for_challenge( sub_slot_iters: uint64, height: uint32, force_plot_id: Optional[bytes32] = None, - ) -> List[Tuple[uint64, ProofOfSpace]]: - found_proofs: List[Tuple[uint64, ProofOfSpace]] = [] + ) -> list[tuple[uint64, ProofOfSpace]]: + found_proofs: list[tuple[uint64, ProofOfSpace]] = [] rng = random.Random() rng.seed(seed) for plot_info in self.plot_manager.plots.values(): @@ -1481,7 +1482,7 @@ def get_signage_point( latest_block: Optional[BlockRecord], sub_slot_start_total_iters: uint128, signage_point_index: uint8, - finished_sub_slots: List[EndOfSubSlotBundle], + finished_sub_slots: list[EndOfSubSlotBundle], sub_slot_iters: uint64, normalized_to_identity_cc_sp: bool = False, ) -> SignagePoint: @@ -1536,9 +1537,9 @@ def get_signage_point( def finish_block( constants: ConsensusConstants, - blocks: Dict[bytes32, BlockRecord], - height_to_hash: Dict[uint32, bytes32], - finished_sub_slots: List[EndOfSubSlotBundle], + blocks: dict[bytes32, BlockRecord], + height_to_hash: dict[uint32, bytes32], + finished_sub_slots: list[EndOfSubSlotBundle], sub_slot_start_total_iters: uint128, signage_point_index: uint8, unfinished_block: UnfinishedBlock, @@ -1550,7 +1551,7 @@ def finish_block( sub_slot_iters: uint64, difficulty: uint64, normalized_to_identity_cc_ip: bool = False, -) -> Tuple[FullBlock, BlockRecord]: +) -> tuple[FullBlock, BlockRecord]: is_overflow = is_overflow_block(constants, signage_point_index) cc_vdf_challenge = slot_cc_challenge if len(finished_sub_slots) == 0: @@ -1627,10 +1628,10 @@ def finish_block( def get_challenges( constants: ConsensusConstants, - blocks: Dict[bytes32, BlockRecord], - finished_sub_slots: List[EndOfSubSlotBundle], + blocks: dict[bytes32, BlockRecord], + finished_sub_slots: list[EndOfSubSlotBundle], prev_header_hash: Optional[bytes32], -) -> Tuple[bytes32, bytes32]: +) -> tuple[bytes32, bytes32]: if len(finished_sub_slots) == 0: if prev_header_hash is None: return constants.GENESIS_CHALLENGE, constants.GENESIS_CHALLENGE @@ -1670,12 +1671,12 @@ def get_plot_tmp_dir(plot_dir_name: str = "test-plots", automated_testing: bool def load_block_list( - block_list: List[FullBlock], constants: ConsensusConstants -) -> Tuple[Dict[uint32, bytes32], uint64, Dict[bytes32, BlockRecord]]: + block_list: list[FullBlock], constants: ConsensusConstants +) -> tuple[dict[uint32, bytes32], uint64, dict[bytes32, BlockRecord]]: difficulty = 
uint64(constants.DIFFICULTY_STARTING) sub_slot_iters = uint64(constants.SUB_SLOT_ITERS_STARTING) - height_to_hash: Dict[uint32, bytes32] = {} - blocks: Dict[bytes32, BlockRecord] = {} + height_to_hash: dict[uint32, bytes32] = {} + blocks: dict[bytes32, BlockRecord] = {} for full_block in block_list: if full_block.height != 0: if len(full_block.finished_sub_slots) > 0: @@ -1716,12 +1717,12 @@ def load_block_list( def get_icc( constants: ConsensusConstants, vdf_end_total_iters: uint128, - finished_sub_slots: List[EndOfSubSlotBundle], + finished_sub_slots: list[EndOfSubSlotBundle], latest_block: BlockRecord, - blocks: Dict[bytes32, BlockRecord], + blocks: dict[bytes32, BlockRecord], sub_slot_start_total_iters: uint128, deficit: uint8, -) -> Tuple[Optional[VDFInfo], Optional[VDFProof]]: +) -> tuple[Optional[VDFInfo], Optional[VDFProof]]: if len(finished_sub_slots) == 0: prev_deficit = latest_block.deficit else: @@ -1772,7 +1773,7 @@ def get_icc( def get_full_block_and_block_record( constants: ConsensusConstants, - blocks: Dict[bytes32, BlockRecord], + blocks: dict[bytes32, BlockRecord], sub_slot_start_total_iters: uint128, signage_point_index: uint8, proof_of_space: ProofOfSpace, @@ -1785,25 +1786,25 @@ def get_full_block_and_block_record( time_per_block: float, block_generator: Optional[BlockGenerator], aggregate_signature: G2Element, - additions: Optional[List[Coin]], - removals: Optional[List[Coin]], - height_to_hash: Dict[uint32, bytes32], + additions: Optional[list[Coin]], + removals: Optional[list[Coin]], + height_to_hash: dict[uint32, bytes32], difficulty: uint64, required_iters: uint64, sub_slot_iters: uint64, get_plot_signature: Callable[[bytes32, G1Element], G2Element], get_pool_signature: Callable[[PoolTarget, Optional[G1Element]], Optional[G2Element]], - finished_sub_slots: List[EndOfSubSlotBundle], + finished_sub_slots: list[EndOfSubSlotBundle], signage_point: SignagePoint, prev_block: BlockRecord, seed: bytes = b"", *, - block_refs: List[uint32] = [], + block_refs: list[uint32] = [], overflow_cc_challenge: Optional[bytes32] = None, overflow_rc_challenge: Optional[bytes32] = None, normalized_to_identity_cc_ip: bool = False, current_time: bool = False, -) -> Tuple[FullBlock, BlockRecord, float]: +) -> tuple[FullBlock, BlockRecord, float]: # we're simulating time between blocks here. The more VDF iterations the # blocks advances, the longer it should have taken (and vice versa). 
This # formula is meant to converge at 1024 iters per the specified @@ -1870,7 +1871,7 @@ def get_full_block_and_block_record( # these are the costs of unknown conditions, as defined chia_rs here: # https://github.com/Chia-Network/chia_rs/pull/181 -def compute_cost_table() -> List[int]: +def compute_cost_table() -> list[int]: A = 17 B = 16 s = [] @@ -1997,7 +1998,7 @@ async def create_block_tools_async( constants: ConsensusConstants = test_constants, root_path: Optional[Path] = None, keychain: Optional[Keychain] = None, - config_overrides: Optional[Dict[str, Any]] = None, + config_overrides: Optional[dict[str, Any]] = None, num_og_plots: int = 15, num_pool_plots: int = 5, num_non_keychain_plots: int = 3, @@ -2020,7 +2021,7 @@ def create_block_tools( constants: ConsensusConstants = test_constants, root_path: Optional[Path] = None, keychain: Optional[Keychain] = None, - config_overrides: Optional[Dict[str, Any]] = None, + config_overrides: Optional[dict[str, Any]] = None, ) -> BlockTools: global create_block_tools_count create_block_tools_count += 1 diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index 8c6fa57c59fa..e0330a691c0b 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -3,7 +3,8 @@ import asyncio import itertools import time -from typing import Any, Collection, Dict, List, Optional, Set, Tuple, Union +from collections.abc import Collection +from typing import Any, Optional, Union import anyio @@ -43,7 +44,7 @@ class _Default: timeout_per_block = 5 -async def wait_for_coins_in_wallet(coins: Set[Coin], wallet: Wallet, timeout: Optional[float] = 5): +async def wait_for_coins_in_wallet(coins: set[Coin], wallet: Wallet, timeout: Optional[float] = 5): """Wait until all of the specified coins are simultaneously reported as spendable by the wallet. 
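The wait_* helpers in this file share a single waiting idiom: a hard deadline from anyio.fail_after wrapped around a polling loop, with sleep intervals drawn from backoff_times(). A rough sketch of that shape under those assumptions; wait_until, check, and poll_interval are hypothetical names, and a fixed sleep stands in for the backoff generator:

    import asyncio
    from collections.abc import Awaitable, Callable

    import anyio

    async def wait_until(
        check: Callable[[], Awaitable[bool]], timeout: float = 5.0, poll_interval: float = 0.1
    ) -> None:
        # anyio.fail_after raises TimeoutError once the deadline passes,
        # turning a stuck condition into a test failure instead of a hang
        with anyio.fail_after(delay=timeout):
            while not await check():
                await asyncio.sleep(poll_interval)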
@@ -65,7 +66,7 @@ async def wait_for_coins_in_wallet(coins: Set[Coin], wallet: Wallet, timeout: Op class FullNodeSimulator(FullNodeAPI): - def __init__(self, full_node: FullNode, block_tools: BlockTools, config: Dict) -> None: + def __init__(self, full_node: FullNode, block_tools: BlockTools, config: dict) -> None: super().__init__(full_node) self.bt = block_tools self.full_node = full_node @@ -75,10 +76,10 @@ def __init__(self, full_node: FullNode, block_tools: BlockTools, config: Dict) - self.use_current_time: bool = self.config.get("simulator", {}).get("use_current_time", False) self.auto_farm: bool = self.config.get("simulator", {}).get("auto_farm", False) - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) - async def get_all_full_blocks(self) -> List[FullBlock]: + async def get_all_full_blocks(self) -> list[FullBlock]: peak: Optional[BlockRecord] = self.full_node.blockchain.get_peak() if peak is None: return [] @@ -120,7 +121,7 @@ async def update_autofarm_config(self, enable_autofarm: bool) -> bool: await self.farm_new_transaction_block(FarmNewBlockProtocol(self.bt.farmer_ph)) return self.auto_farm - async def get_all_coins(self, request: GetAllCoinsProtocol) -> List[CoinRecord]: + async def get_all_coins(self, request: GetAllCoinsProtocol) -> list[CoinRecord]: return await self.full_node.coin_store.get_all_coins(request.include_spent_coins) async def revert_block_height(self, new_height: uint32) -> None: @@ -149,15 +150,15 @@ async def revert_block_height(self, new_height: uint32) -> None: # reload mempool await self.full_node.mempool_manager.new_peak(block_record, None) - async def get_all_puzzle_hashes(self) -> Dict[bytes32, Tuple[uint128, int]]: + async def get_all_puzzle_hashes(self) -> dict[bytes32, tuple[uint128, int]]: # puzzle_hash, (total_amount, num_transactions) - ph_total_amount: Dict[bytes32, Tuple[uint128, int]] = {} - all_non_spent_coins: List[CoinRecord] = await self.get_all_coins(GetAllCoinsProtocol(False)) + ph_total_amount: dict[bytes32, tuple[uint128, int]] = {} + all_non_spent_coins: list[CoinRecord] = await self.get_all_coins(GetAllCoinsProtocol(False)) for cr in all_non_spent_coins: if cr.coin.puzzle_hash not in ph_total_amount: ph_total_amount[cr.coin.puzzle_hash] = (uint128(cr.coin.amount), 1) else: - dict_value: Tuple[uint128, int] = ph_total_amount[cr.coin.puzzle_hash] + dict_value: tuple[uint128, int] = ph_total_amount[cr.coin.puzzle_hash] ph_total_amount[cr.coin.puzzle_hash] = (uint128(cr.coin.amount + dict_value[0]), dict_value[1] + 1) return ph_total_amount @@ -171,7 +172,7 @@ async def farm_new_transaction_block( current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, self.full_node.blockchain, [genesis], @@ -232,7 +233,7 @@ async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_tim current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing( + 
pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, self.full_node.blockchain, [genesis], @@ -476,7 +477,7 @@ async def wait_transaction_records_entered_mempool( records: The transaction records to wait for. """ with anyio.fail_after(delay=adjusted_timeout(timeout)): - ids_to_check: Set[bytes32] = set() + ids_to_check: set[bytes32] = set() for record in records: if record.spend_bundle is None: continue @@ -507,7 +508,7 @@ async def wait_bundle_ids_in_mempool( records: The bundle ids to wait for. """ with anyio.fail_after(delay=adjusted_timeout(timeout)): - ids_to_check: Set[bytes32] = set(bundle_ids) + ids_to_check: set[bytes32] = set(bundle_ids) for backoff in backoff_times(): found = set() @@ -535,7 +536,7 @@ async def wait_transaction_records_marked_as_in_mempool( records: The transaction records to wait for. """ with anyio.fail_after(delay=adjusted_timeout(timeout)): - ids_to_check: Set[bytes32] = set(record_ids) + ids_to_check: set[bytes32] = set(record_ids) for backoff in backoff_times(): found = set() @@ -564,7 +565,7 @@ async def process_transaction_records( with anyio.fail_after(delay=adjusted_timeout(timeout)): await self.wait_for_self_synced(timeout=None) - coins_to_wait_for: Set[Coin] = set() + coins_to_wait_for: set[Coin] = set() for record in records: if record.spend_bundle is None: continue @@ -588,7 +589,7 @@ async def process_spend_bundles( """ with anyio.fail_after(delay=adjusted_timeout(timeout)): - coins_to_wait_for: Set[Coin] = {addition for bundle in bundles for addition in bundle.additions()} + coins_to_wait_for: set[Coin] = {addition for bundle in bundles for addition in bundle.additions()} return await self.process_coin_spends(coins=coins_to_wait_for, timeout=None) async def process_coin_spends( @@ -610,7 +611,7 @@ async def process_coin_spends( while True: await self.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True, timeout=None) - found: Set[Coin] = set() + found: set[Coin] = set() for coin in coin_set: # TODO: is this the proper check? if await coin_store.get_coin_record(coin.name()) is not None: @@ -646,10 +647,10 @@ async def process_all_wallet_transactions(self, wallet: Wallet, timeout: Optiona async def check_transactions_confirmed( self, wallet_state_manager: WalletStateManager, - transactions: List[TransactionRecord], + transactions: list[TransactionRecord], timeout: Optional[float] = 5, ) -> None: - transactions_left: Set[bytes32] = {tx.name for tx in transactions} + transactions_left: set[bytes32] = {tx.name for tx in transactions} with anyio.fail_after(delay=adjusted_timeout(timeout)): for backoff in backoff_times(): transactions_left = transactions_left & { @@ -663,11 +664,11 @@ async def check_transactions_confirmed( async def create_coins_with_amounts( self, - amounts: List[uint64], + amounts: list[uint64], wallet: Wallet, per_transaction_record_group: int = 50, timeout: Union[None, float] = 15, - ) -> Set[Coin]: + ) -> set[Coin]: """Create coins with the requested amount. This is useful when you need a bunch of coins for a test and don't need to farm that many. 
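The hunk below drains the outputs iterator in groups of per_transaction_record_group by zipping it against a range(), and the in-code comment warns that the iterator must be the second argument to zip(). The reason: zip() advances its arguments left to right, so when the range is exhausted first, nothing more is pulled from the iterator; with the arguments swapped, one element would be fetched and then silently dropped at each group boundary. A self-contained demonstration (take_batch is a hypothetical name, not a helper from this codebase):

    from collections.abc import Iterator

    def take_batch(items: Iterator[int], group_size: int) -> list[int]:
        # range() first: once it runs out, zip stops without touching items again
        return [item for _, item in zip(range(group_size), items)]

    it = iter(range(7))
    assert take_batch(it, 3) == [0, 1, 2]
    assert take_batch(it, 3) == [3, 4, 5]
    assert take_batch(it, 3) == [6]  # no element was lost between batches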
@@ -691,8 +692,8 @@ async def create_coins_with_amounts( if len(amounts) == 0: return set() - outputs: List[Payment] = [] - amounts_seen: Set[uint64] = set() + outputs: list[Payment] = [] + amounts_seen: set[uint64] = set() for amount in amounts: # We need unique puzzle hash amount combos so we'll only generate a new puzzle hash when we've already # seen that amount sent to that puzzle hash @@ -700,7 +701,7 @@ async def create_coins_with_amounts( outputs.append(Payment(puzzle_hash, amount)) amounts_seen.add(amount) - transaction_records: List[TransactionRecord] = [] + transaction_records: list[TransactionRecord] = [] outputs_iterator = iter(outputs) while True: # The outputs iterator must be second in the zip() call otherwise we lose @@ -736,7 +737,7 @@ def tx_id_in_mempool(self, tx_id: bytes32) -> bool: spendbundle = self.full_node.mempool_manager.get_spendbundle(bundle_hash=tx_id) return spendbundle is not None - def txs_in_mempool(self, txs: List[TransactionRecord]) -> bool: + def txs_in_mempool(self, txs: list[TransactionRecord]) -> bool: return all(self.tx_id_in_mempool(tx_id=tx.spend_bundle.name()) for tx in txs if tx.spend_bundle is not None) async def self_is_synced(self) -> bool: @@ -767,7 +768,7 @@ async def wait_for_wallet_synced( break await asyncio.sleep(backoff_time) - async def wallets_are_synced(self, wallet_nodes: List[WalletNode], peak_height: Optional[uint32] = None) -> bool: + async def wallets_are_synced(self, wallet_nodes: list[WalletNode], peak_height: Optional[uint32] = None) -> bool: return all( [ await self.wallet_is_synced(wallet_node=wallet_node, peak_height=peak_height) @@ -777,7 +778,7 @@ async def wallets_are_synced(self, wallet_nodes: List[WalletNode], peak_height: async def wait_for_wallets_synced( self, - wallet_nodes: List[WalletNode], + wallet_nodes: list[WalletNode], timeout: Optional[float] = 5, peak_height: Optional[uint32] = None, ) -> None: diff --git a/chia/simulator/setup_services.py b/chia/simulator/setup_services.py index 71c2da53fd78..e2ce8b0b0159 100644 --- a/chia/simulator/setup_services.py +++ b/chia/simulator/setup_services.py @@ -6,10 +6,11 @@ import signal import sqlite3 import time +from collections.abc import AsyncGenerator, AsyncIterator, Iterator from contextlib import asynccontextmanager, contextmanager from pathlib import Path from types import FrameType -from typing import Any, AsyncGenerator, AsyncIterator, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Optional, Union from chia.cmds.init_funcs import init from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes @@ -53,7 +54,7 @@ @contextmanager -def create_lock_and_load_config(certs_path: Path, root_path: Path) -> Iterator[Dict[str, Any]]: +def create_lock_and_load_config(certs_path: Path, root_path: Path) -> Iterator[dict[str, Any]]: init(None, root_path) init(certs_path, root_path) path = config_path_for_filename(root_path=root_path, filename="config.yaml") @@ -63,7 +64,7 @@ def create_lock_and_load_config(certs_path: Path, root_path: Path) -> Iterator[D yield config -def get_capability_overrides(node_type: NodeType, disabled_capabilities: List[Capability]) -> List[Tuple[uint16, str]]: +def get_capability_overrides(node_type: NodeType, disabled_capabilities: list[Capability]) -> list[tuple[uint16, str]]: return [ ( capability @@ -102,7 +103,7 @@ async def setup_full_node( sanitize_weight_proof_only: bool = False, connect_to_daemon: bool = False, db_version: int = 1, - disable_capabilities: Optional[List[Capability]] = None, + 
disable_capabilities: Optional[list[Capability]] = None, *, reuse_db: bool = False, ) -> AsyncGenerator[Union[FullNodeService, SimulatorFullNodeService], None]: @@ -482,7 +483,7 @@ async def setup_timelord( full_node_port: int, sanitizer: bool, consensus_constants: ConsensusConstants, - config: Dict[str, Any], + config: dict[str, Any], root_path: Path, vdf_port: uint16 = uint16(0), ) -> AsyncGenerator[TimelordService, None]: diff --git a/chia/simulator/simulator_full_node_rpc_api.py b/chia/simulator/simulator_full_node_rpc_api.py index fa588f544f9d..154fd252418e 100644 --- a/chia/simulator/simulator_full_node_rpc_api.py +++ b/chia/simulator/simulator_full_node_rpc_api.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, List - from chia.rpc.full_node_rpc_api import FullNodeRpcApi from chia.rpc.rpc_server import Endpoint, EndpointResult from chia.simulator.full_node_simulator import FullNodeSimulator @@ -19,7 +17,7 @@ def simulator_api(self) -> FullNodeSimulator: assert isinstance(self.service.server.api, FullNodeSimulator) return self.service.server.api - def get_routes(self) -> Dict[str, Endpoint]: + def get_routes(self) -> dict[str, Endpoint]: routes = super().get_routes() routes["/get_all_blocks"] = self.get_all_blocks routes["/farm_block"] = self.farm_block @@ -32,11 +30,11 @@ def get_routes(self) -> Dict[str, Endpoint]: routes["/reorg_blocks"] = self.reorg_blocks return routes - async def get_all_blocks(self, _request: Dict[str, object]) -> EndpointResult: - all_blocks: List[FullBlock] = await self.simulator_api.get_all_full_blocks() + async def get_all_blocks(self, _request: dict[str, object]) -> EndpointResult: + all_blocks: list[FullBlock] = await self.simulator_api.get_all_full_blocks() return {"blocks": [block.to_json_dict() for block in all_blocks]} - async def farm_block(self, _request: Dict[str, object]) -> EndpointResult: + async def farm_block(self, _request: dict[str, object]) -> EndpointResult: request_address = str(_request["address"]) guarantee_tx_block = bool(_request.get("guarantee_tx_block", False)) blocks = int(str(_request.get("blocks", 1))) # mypy made me do this @@ -51,29 +49,29 @@ async def farm_block(self, _request: Dict[str, object]) -> EndpointResult: await self.simulator_api.farm_new_block(req) return {"new_peak_height": (cur_height if cur_height is not None else 0) + blocks} - async def set_auto_farming(self, _request: Dict[str, object]) -> EndpointResult: + async def set_auto_farming(self, _request: dict[str, object]) -> EndpointResult: auto_farm = bool(_request["auto_farm"]) result = await self.simulator_api.update_autofarm_config(auto_farm) return {"auto_farm_enabled": result} - async def get_auto_farming(self, _request: Dict[str, object]) -> EndpointResult: + async def get_auto_farming(self, _request: dict[str, object]) -> EndpointResult: return {"auto_farm_enabled": self.simulator_api.auto_farm} - async def get_farming_ph(self, _request: Dict[str, object]) -> EndpointResult: + async def get_farming_ph(self, _request: dict[str, object]) -> EndpointResult: return {"puzzle_hash": self.simulator_api.bt.farmer_ph.hex()} - async def get_all_coins(self, _request: Dict[str, object]) -> EndpointResult: + async def get_all_coins(self, _request: dict[str, object]) -> EndpointResult: p_request = GetAllCoinsProtocol(bool(_request.get("include_spent_coins", False))) - result: List[CoinRecord] = await self.simulator_api.get_all_coins(p_request) + result: list[CoinRecord] = await self.simulator_api.get_all_coins(p_request) return 
{"coin_records": [coin_record.to_json_dict() for coin_record in result]} - async def get_all_puzzle_hashes(self, _request: Dict[str, object]) -> EndpointResult: + async def get_all_puzzle_hashes(self, _request: dict[str, object]) -> EndpointResult: result = await self.simulator_api.get_all_puzzle_hashes() return { "puzzle_hashes": {puzzle_hash.hex(): (amount, num_tx) for (puzzle_hash, (amount, num_tx)) in result.items()} } - async def revert_blocks(self, _request: Dict[str, object]) -> EndpointResult: + async def revert_blocks(self, _request: dict[str, object]) -> EndpointResult: blocks = int(str(_request.get("num_of_blocks", 1))) # number of blocks to revert all_blocks = bool(_request.get("delete_all_blocks", False)) # revert all blocks height = self.service.blockchain.get_peak_height() @@ -84,7 +82,7 @@ async def revert_blocks(self, _request: Dict[str, object]) -> EndpointResult: await self.simulator_api.revert_block_height(uint32(new_height)) return {"new_peak_height": new_height} - async def reorg_blocks(self, _request: Dict[str, object]) -> EndpointResult: + async def reorg_blocks(self, _request: dict[str, object]) -> EndpointResult: fork_blocks = int(str(_request.get("num_of_blocks_to_rev", 1))) # number of blocks to go back new_blocks = int(str(_request.get("num_of_new_blocks", 1))) # how many extra blocks should we add all_blocks = bool(_request.get("revert_all_blocks", False)) # fork all blocks diff --git a/chia/simulator/simulator_full_node_rpc_client.py b/chia/simulator/simulator_full_node_rpc_client.py index 70ad09fade24..78c5e6b36bfd 100644 --- a/chia/simulator/simulator_full_node_rpc_client.py +++ b/chia/simulator/simulator_full_node_rpc_client.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, List, Tuple - from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_record import CoinRecord @@ -11,7 +9,7 @@ class SimulatorFullNodeRpcClient(FullNodeRpcClient): - async def get_all_blocks(self) -> List[FullBlock]: + async def get_all_blocks(self) -> list[FullBlock]: json_blocks = (await self.fetch("get_all_blocks", {}))["blocks"] return [FullBlock.from_json_dict(block) for block in json_blocks] @@ -34,11 +32,11 @@ async def get_farming_ph(self) -> bytes32: result = await self.fetch("get_farming_ph", {}) return bytes32.from_hexstr(result["puzzle_hash"]) - async def get_all_coins(self, include_spent_coins: bool = False) -> List[CoinRecord]: + async def get_all_coins(self, include_spent_coins: bool = False) -> list[CoinRecord]: json_result = await self.fetch("get_all_coins", {"include_spent_coins": include_spent_coins}) return [CoinRecord.from_json_dict(coin_records) for coin_records in json_result["coin_records"]] - async def get_all_puzzle_hashes(self) -> Dict[bytes32, Tuple[uint128, int]]: + async def get_all_puzzle_hashes(self) -> dict[bytes32, tuple[uint128, int]]: str_result = (await self.fetch("get_all_puzzle_hashes", {}))["puzzle_hashes"] return {bytes32.from_hexstr(ph): (uint128(amount), num_tx) for (ph, (amount, num_tx)) in str_result.items()} diff --git a/chia/simulator/simulator_test_tools.py b/chia/simulator/simulator_test_tools.py index cce56d94e9cc..552175ee2403 100644 --- a/chia/simulator/simulator_test_tools.py +++ b/chia/simulator/simulator_test_tools.py @@ -1,8 +1,9 @@ from __future__ import annotations import sys +from collections.abc import AsyncGenerator from pathlib import Path -from typing import Any, AsyncGenerator, Dict, Optional, Tuple +from 
typing import Any, Optional from chia_rs import PrivateKey @@ -34,7 +35,7 @@ """ -def mnemonic_fingerprint(keychain: Keychain) -> Tuple[str, int]: +def mnemonic_fingerprint(keychain: Keychain) -> tuple[str, int]: mnemonic = ( "today grape album ticket joy idle supreme sausage " "oppose voice angle roast you oven betray exact " @@ -62,10 +63,10 @@ def get_puzzle_hash_from_key(keychain: Keychain, fingerprint: int, key_id: int = def create_config( chia_root: Path, fingerprint: int, - private_ca_crt_and_key: Tuple[bytes, bytes], - node_certs_and_keys: Dict[str, Dict[str, Dict[str, bytes]]], + private_ca_crt_and_key: tuple[bytes, bytes], + node_certs_and_keys: dict[str, dict[str, dict[str, bytes]]], keychain: Keychain, -) -> Dict[str, Any]: +) -> dict[str, Any]: # create chia directories create_default_chia_config(chia_root) create_all_ssl( @@ -119,8 +120,8 @@ async def get_full_chia_simulator( chia_root: Path, keychain: Optional[Keychain] = None, automated_testing: bool = False, - config: Optional[Dict[str, Any]] = None, -) -> AsyncGenerator[Tuple[FullNodeSimulator, Path, Dict[str, Any], str, int, Keychain], None]: + config: Optional[dict[str, Any]] = None, +) -> AsyncGenerator[tuple[FullNodeSimulator, Path, dict[str, Any], str, int, Keychain], None]: """ A chia root Path is required. The chia root Path can be a temporary directory (tempfile.TemporaryDirectory) diff --git a/chia/simulator/socket.py b/chia/simulator/socket.py index cb3e40bf74de..175cc2d280fc 100644 --- a/chia/simulator/socket.py +++ b/chia/simulator/socket.py @@ -2,9 +2,8 @@ import socket from contextlib import closing -from typing import Set -recent_ports: Set[int] = set() +recent_ports: set[int] = set() def find_available_listen_port(name: str = "free") -> int: diff --git a/chia/simulator/ssl_certs.py b/chia/simulator/ssl_certs.py index f0251090ecb3..a0b82f8a2581 100644 --- a/chia/simulator/ssl_certs.py +++ b/chia/simulator/ssl_certs.py @@ -2,7 +2,7 @@ import itertools from dataclasses import dataclass, field -from typing import Dict, Generic, List, Tuple, TypeVar +from typing import Generic, TypeVar from chia.simulator.ssl_certs_1 import SSL_TEST_NODE_CERTS_AND_KEYS_1, SSL_TEST_PRIVATE_CA_CERT_AND_KEY_1 from chia.simulator.ssl_certs_2 import SSL_TEST_NODE_CERTS_AND_KEYS_2, SSL_TEST_PRIVATE_CA_CERT_AND_KEY_2 @@ -39,12 +39,12 @@ def mark_not_in_use(self) -> None: @dataclass class SSLTestCACertAndPrivateKey(SSLTestCollateralTracker): - cert_and_key: Tuple[bytes, bytes] + cert_and_key: tuple[bytes, bytes] @dataclass class SSLTestNodeCertsAndKeys(SSLTestCollateralTracker): - certs_and_keys: Dict[str, Dict[str, Dict[str, bytes]]] + certs_and_keys: dict[str, dict[str, dict[str, bytes]]] @dataclass @@ -64,7 +64,7 @@ def __del__(self) -> None: # Private CA certs/keys # --------------------------------------------------------------------------- -SSL_TEST_PRIVATE_CA_CERTS_AND_KEYS: List[SSLTestCACertAndPrivateKey] = [ +SSL_TEST_PRIVATE_CA_CERTS_AND_KEYS: list[SSLTestCACertAndPrivateKey] = [ SSLTestCACertAndPrivateKey(SSL_TEST_PRIVATE_CA_CERT_AND_KEY_1), SSLTestCACertAndPrivateKey(SSL_TEST_PRIVATE_CA_CERT_AND_KEY_2), SSLTestCACertAndPrivateKey(SSL_TEST_PRIVATE_CA_CERT_AND_KEY_3), @@ -81,7 +81,7 @@ def __del__(self) -> None: # Node -> cert/key mappings # --------------------------------------------------------------------------- -SSL_TEST_NODE_CERTS_AND_KEYS: List[SSLTestNodeCertsAndKeys] = [ +SSL_TEST_NODE_CERTS_AND_KEYS: list[SSLTestNodeCertsAndKeys] = [ SSLTestNodeCertsAndKeys(SSL_TEST_NODE_CERTS_AND_KEYS_1), 
SSLTestNodeCertsAndKeys(SSL_TEST_NODE_CERTS_AND_KEYS_2), SSLTestNodeCertsAndKeys(SSL_TEST_NODE_CERTS_AND_KEYS_3), diff --git a/chia/simulator/ssl_certs_1.py b/chia/simulator/ssl_certs_1.py index b1a47ba6cd77..7a65c0e57ba7 100644 --- a/chia/simulator/ssl_certs_1.py +++ b/chia/simulator/ssl_certs_1.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUZnoqyaLGQMl08azdwpafGGfEGR0wDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_1: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_1: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_1: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_1: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_10.py b/chia/simulator/ssl_certs_10.py index ad959b0f15f3..9231a73f9011 100644 --- a/chia/simulator/ssl_certs_10.py +++ b/chia/simulator/ssl_certs_10.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUWzGLmUq3RgS/u2oYomRSyKsXWwcwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_10: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_10: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_10: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_10: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_2.py b/chia/simulator/ssl_certs_2.py index 2b94240cee89..ef8d756288fc 100644 --- a/chia/simulator/ssl_certs_2.py +++ b/chia/simulator/ssl_certs_2.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUbFhRlgpIM3M+ZYuTigQ1Vbmi6P4wDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_2: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_2: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_2: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_2: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_3.py b/chia/simulator/ssl_certs_3.py index 900008043bca..0ba21ec94f4b 100644 --- a/chia/simulator/ssl_certs_3.py +++ b/chia/simulator/ssl_certs_3.py @@ -1,7 +1,5 @@ from 
__future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUeLxDdxR+RmiMMvvCxRfsQJAL7vUwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_3: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_3: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_3: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_3: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_4.py b/chia/simulator/ssl_certs_4.py index 5a02888c5b47..c9cd905a1fb6 100644 --- a/chia/simulator/ssl_certs_4.py +++ b/chia/simulator/ssl_certs_4.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUS+xy2kGNomsBGfU0DEELWiHoSa4wDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_4: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_4: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_4: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_4: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_5.py b/chia/simulator/ssl_certs_5.py index be868dcd4978..f19adba3c224 100644 --- a/chia/simulator/ssl_certs_5.py +++ b/chia/simulator/ssl_certs_5.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUXU/nGxb+rZck2qIMztmDWKDZCBcwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_5: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_5: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_5: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_5: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_6.py b/chia/simulator/ssl_certs_6.py index 8cc74ca3deb7..97a8221b4b81 100644 --- a/chia/simulator/ssl_certs_6.py +++ b/chia/simulator/ssl_certs_6.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUD5VUdvJQlRhGJg2WJ+/K8I2sYZcwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_6: 
Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_6: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_6: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_6: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_7.py b/chia/simulator/ssl_certs_7.py index cfbb8e07daab..ddbccf57c2a5 100644 --- a/chia/simulator/ssl_certs_7.py +++ b/chia/simulator/ssl_certs_7.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUHoeobLQu3yMmraIDXDF+F6M4j9IwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_7: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_7: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_7: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_7: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_8.py b/chia/simulator/ssl_certs_8.py index b5aba1c85e4e..235d0bbb1c47 100644 --- a/chia/simulator/ssl_certs_8.py +++ b/chia/simulator/ssl_certs_8.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUaAgNa9r45P7lGSz9yjEAMmSYuA4wDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_8: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_8: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_8: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_8: dict[str, dict[str, dict[str, bytes]]] = { "full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/ssl_certs_9.py b/chia/simulator/ssl_certs_9.py index 720e4019316c..5ba1e5a8c5b2 100644 --- a/chia/simulator/ssl_certs_9.py +++ b/chia/simulator/ssl_certs_9.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Tuple - SSL_TEST_PRIVATE_CA_CRT = b"""-----BEGIN CERTIFICATE----- MIIDKTCCAhGgAwIBAgIUX9kvHEjir5fniTg3GRQ8wO7zOvAwDQYJKoZIhvcNAQEL BQAwRDENMAsGA1UECgwEQ2hpYTEQMA4GA1UEAwwHQ2hpYSBDQTEhMB8GA1UECwwY @@ -665,9 +663,9 @@ -----END PRIVATE KEY----- """ -SSL_TEST_PRIVATE_CA_CERT_AND_KEY_9: Tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) +SSL_TEST_PRIVATE_CA_CERT_AND_KEY_9: tuple[bytes, bytes] = (SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY) -SSL_TEST_NODE_CERTS_AND_KEYS_9: Dict[str, Dict[str, Dict[str, bytes]]] = { +SSL_TEST_NODE_CERTS_AND_KEYS_9: dict[str, dict[str, dict[str, bytes]]] = { 
"full_node": { "private": {"crt": SSL_TEST_FULLNODE_PRIVATE_CRT, "key": SSL_TEST_FULLNODE_PRIVATE_KEY}, "public": {"crt": SSL_TEST_FULLNODE_PUBLIC_CRT, "key": SSL_TEST_FULLNODE_PUBLIC_KEY}, diff --git a/chia/simulator/start_simulator.py b/chia/simulator/start_simulator.py index d3f2215e8cb8..f049daefad35 100644 --- a/chia/simulator/start_simulator.py +++ b/chia/simulator/start_simulator.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from multiprocessing import freeze_support from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia.full_node.full_node import FullNode from chia.server.outbound_message import NodeType @@ -34,10 +34,10 @@ async def create_full_node_simulator_service( root_path: Path, - config: Dict[str, Any], + config: dict[str, Any], bt: BlockTools, connect_to_daemon: bool = True, - override_capabilities: Optional[List[Tuple[uint16, str]]] = None, + override_capabilities: Optional[list[tuple[uint16, str]]] = None, ) -> SimulatorFullNodeService: service_config = config[SERVICE_NAME] constants = bt.constants diff --git a/chia/simulator/wallet_tools.py b/chia/simulator/wallet_tools.py index 7ea899e4d332..2a76aa817294 100644 --- a/chia/simulator/wallet_tools.py +++ b/chia/simulator/wallet_tools.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey from clvm.casts import int_from_bytes, int_to_bytes @@ -32,8 +32,8 @@ class WalletTool: next_address = 0 - pubkey_num_lookup: Dict[bytes, uint32] = {} - puzzle_pk_cache: Dict[bytes32, PrivateKey] = {} + pubkey_num_lookup: dict[bytes, uint32] = {} + puzzle_pk_cache: dict[bytes32, PrivateKey] = {} def __init__(self, constants: ConsensusConstants, sk: Optional[PrivateKey] = None): self.constants = constants @@ -43,8 +43,8 @@ def __init__(self, constants: ConsensusConstants, sk: Optional[PrivateKey] = Non self.private_key = sk else: self.private_key = AugSchemeMPL.key_gen(DEFAULT_SEED) - self.generator_lookups: Dict = {} - self.puzzle_pk_cache: Dict = {} + self.generator_lookups: dict = {} + self.puzzle_pk_cache: dict = {} self.get_new_puzzle() def get_next_address_index(self) -> uint32: @@ -83,13 +83,13 @@ def sign(self, value: bytes, pubkey: bytes) -> G2Element: privatekey: PrivateKey = master_sk_to_wallet_sk(self.private_key, self.pubkey_num_lookup[pubkey]) return AugSchemeMPL.sign(privatekey, value) - def make_solution(self, condition_dic: Dict[ConditionOpcode, List[ConditionWithArgs]]) -> Program: + def make_solution(self, condition_dic: dict[ConditionOpcode, list[ConditionWithArgs]]) -> Program: ret = [] for con_list in condition_dic.values(): for cvp in con_list: if cvp.opcode == ConditionOpcode.CREATE_COIN and len(cvp.vars) > 2: - formatted: List[Any] = [] + formatted: list[Any] = [] formatted.extend(cvp.vars) formatted[2] = cvp.vars[2:] ret.append([cvp.opcode.value] + formatted) @@ -101,13 +101,13 @@ def generate_unsigned_transaction( self, amount: uint64, new_puzzle_hash: bytes32, - coins: List[Coin], - condition_dic: Dict[ConditionOpcode, List[ConditionWithArgs]], + coins: list[Coin], + condition_dic: dict[ConditionOpcode, list[ConditionWithArgs]], fee: int = 0, secret_key: Optional[PrivateKey] = None, - additional_outputs: Optional[List[Tuple[bytes32, int]]] = None, + additional_outputs: Optional[list[tuple[bytes32, int]]] = None, memo: Optional[bytes32] = None, - ) -> List[CoinSpend]: + ) -> 
list[CoinSpend]: spends = [] spend_value = sum(c.amount for c in coins) @@ -134,7 +134,7 @@ def generate_unsigned_transaction( change_output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [change_puzzle_hash, int_to_bytes(change)]) condition_dic[output.opcode].append(change_output) - secondary_coins_cond_dic: Dict[ConditionOpcode, List[ConditionWithArgs]] = dict() + secondary_coins_cond_dic: dict[ConditionOpcode, list[ConditionWithArgs]] = dict() secondary_coins_cond_dic[ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT] = [] for n, coin in enumerate(coins): puzzle_hash = coin.puzzle_hash @@ -175,7 +175,7 @@ def generate_unsigned_transaction( ) return spends - def sign_transaction(self, coin_spends: List[CoinSpend]) -> SpendBundle: + def sign_transaction(self, coin_spends: list[CoinSpend]) -> SpendBundle: signatures = [] data = agg_sig_additional_data(self.constants.AGG_SIG_ME_ADDITIONAL_DATA) agg_sig_opcodes = [ @@ -214,9 +214,9 @@ def generate_signed_transaction( amount: uint64, new_puzzle_hash: bytes32, coin: Coin, - condition_dic: Dict[ConditionOpcode, List[ConditionWithArgs]] = None, + condition_dic: dict[ConditionOpcode, list[ConditionWithArgs]] = None, fee: int = 0, - additional_outputs: Optional[List[Tuple[bytes32, int]]] = None, + additional_outputs: Optional[list[tuple[bytes32, int]]] = None, memo: Optional[bytes32] = None, ) -> SpendBundle: if condition_dic is None: @@ -231,10 +231,10 @@ def generate_signed_transaction_multiple_coins( self, amount: uint64, new_puzzle_hash: bytes32, - coins: List[Coin], - condition_dic: Dict[ConditionOpcode, List[ConditionWithArgs]] = None, + coins: list[Coin], + condition_dic: dict[ConditionOpcode, list[ConditionWithArgs]] = None, fee: int = 0, - additional_outputs: Optional[List[Tuple[bytes32, int]]] = None, + additional_outputs: Optional[list[tuple[bytes32, int]]] = None, ) -> SpendBundle: if condition_dic is None: condition_dic = {} diff --git a/chia/ssl/create_ssl.py b/chia/ssl/create_ssl.py index fd16332d3921..29bde0956901 100644 --- a/chia/ssl/create_ssl.py +++ b/chia/ssl/create_ssl.py @@ -3,7 +3,7 @@ import datetime import os from pathlib import Path -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional import importlib_resources from cryptography import x509 @@ -15,7 +15,7 @@ from chia.util.ssl_check import DEFAULT_PERMISSIONS_CERT_FILE, DEFAULT_PERMISSIONS_KEY_FILE -_all_private_node_names: List[str] = [ +_all_private_node_names: list[str] = [ "full_node", "wallet", "farmer", @@ -25,10 +25,10 @@ "data_layer", "daemon", ] -_all_public_node_names: List[str] = ["full_node", "wallet", "farmer", "introducer", "timelord", "data_layer"] +_all_public_node_names: list[str] = ["full_node", "wallet", "farmer", "introducer", "timelord", "data_layer"] -def get_chia_ca_crt_key() -> Tuple[Any, Any]: +def get_chia_ca_crt_key() -> tuple[Any, Any]: here = importlib_resources.files(__name__.rpartition(".")[0]) crt = here.joinpath("chia_ca.crt").read_bytes() key = here.joinpath("chia_ca.key").read_bytes() @@ -57,7 +57,7 @@ def write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: Path, ke f.write(data) # lgtm [py/clear-text-storage-sensitive-data] -def ensure_ssl_dirs(dirs: List[Path]): +def ensure_ssl_dirs(dirs: list[Path]): """Create SSL dirs with a default 755 mode if necessary""" for dir in dirs: if not dir.exists(): @@ -137,10 +137,10 @@ def make_ca_cert(cert_path: Path, key_path: Path): def create_all_ssl( root_path: Path, *, - private_ca_crt_and_key: Optional[Tuple[bytes, bytes]] = None, - node_certs_and_keys: 
Optional[Dict[str, Dict]] = None, - private_node_names: List[str] = _all_private_node_names, - public_node_names: List[str] = _all_public_node_names, + private_ca_crt_and_key: Optional[tuple[bytes, bytes]] = None, + node_certs_and_keys: Optional[dict[str, dict]] = None, + private_node_names: list[str] = _all_private_node_names, + public_node_names: list[str] = _all_public_node_names, overwrite: bool = True, ): # remove old key and crt @@ -219,9 +219,9 @@ def generate_ssl_for_nodes( ca_key: bytes, *, prefix: str, - nodes: List[str], + nodes: list[str], overwrite: bool = True, - node_certs_and_keys: Optional[Dict[str, Dict]] = None, + node_certs_and_keys: Optional[dict[str, dict]] = None, ): for node_name in nodes: node_dir = ssl_dir / node_name diff --git a/chia/timelord/iters_from_block.py b/chia/timelord/iters_from_block.py index 52b4668eaea7..f57b88997161 100644 --- a/chia/timelord/iters_from_block.py +++ b/chia/timelord/iters_from_block.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Optional, Tuple, Union +from typing import Optional, Union from chia.consensus.constants import ConsensusConstants from chia.consensus.pot_iterations import calculate_ip_iters, calculate_iterations_quality, calculate_sp_iters @@ -16,7 +16,7 @@ def iters_from_block( sub_slot_iters: uint64, difficulty: uint64, height: uint32, -) -> Tuple[uint64, uint64]: +) -> tuple[uint64, uint64]: if reward_chain_block.challenge_chain_sp_vdf is None: assert reward_chain_block.signage_point_index == 0 cc_sp: bytes32 = reward_chain_block.pos_ss_cc_challenge_hash diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index a80268df87a4..462f4783ca8b 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -9,9 +9,10 @@ import random import time import traceback +from collections.abc import AsyncIterator from concurrent.futures import ThreadPoolExecutor from pathlib import Path -from typing import TYPE_CHECKING, Any, AsyncIterator, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chiavdf import create_discriminant, prove @@ -81,50 +82,50 @@ def server(self) -> ChiaServer: return self._server - def __init__(self, root_path: Path, config: Dict[str, Any], constants: ConsensusConstants) -> None: + def __init__(self, root_path: Path, config: dict[str, Any], constants: ConsensusConstants) -> None: self.config = config self.root_path = root_path self.constants = constants self._shut_down = False - self.free_clients: List[Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = [] + self.free_clients: list[tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = [] self.ip_whitelist = self.config["vdf_clients"]["ip"] self._server: Optional[ChiaServer] = None - self.chain_type_to_stream: Dict[Chain, Tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = {} - self.chain_start_time: Dict[Chain, float] = {} + self.chain_type_to_stream: dict[Chain, tuple[str, asyncio.StreamReader, asyncio.StreamWriter]] = {} + self.chain_start_time: dict[Chain, float] = {} # Chains that currently don't have a vdf_client. - self.unspawned_chains: List[Chain] = [ + self.unspawned_chains: list[Chain] = [ Chain.CHALLENGE_CHAIN, Chain.REWARD_CHAIN, Chain.INFUSED_CHALLENGE_CHAIN, ] # Chains that currently accept iterations. - self.allows_iters: List[Chain] = [] + self.allows_iters: list[Chain] = [] # Last peak received, None if it's already processed. 
self.new_peak: Optional[timelord_protocol.NewPeakTimelord] = None # Last state received. Can either be a new peak or a new EndOfSubslotBundle. # Unfinished block info, iters adjusted to the last peak. - self.unfinished_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = [] + self.unfinished_blocks: list[timelord_protocol.NewUnfinishedBlockTimelord] = [] # Signage points iters, adjusted to the last peak. - self.signage_point_iters: List[Tuple[uint64, uint8]] = [] + self.signage_point_iters: list[tuple[uint64, uint8]] = [] # For each chain, send those info when the process spawns. - self.iters_to_submit: Dict[Chain, List[uint64]] = {} - self.iters_submitted: Dict[Chain, List[uint64]] = {} - self.iters_finished: Set[uint64] = set() + self.iters_to_submit: dict[Chain, list[uint64]] = {} + self.iters_submitted: dict[Chain, list[uint64]] = {} + self.iters_finished: set[uint64] = set() # For each iteration submitted, know if it's a signage point, an infusion point or an end of slot. - self.iteration_to_proof_type: Dict[uint64, IterationType] = {} + self.iteration_to_proof_type: dict[uint64, IterationType] = {} # List of proofs finished. - self.proofs_finished: List[Tuple[Chain, VDFInfo, VDFProof, int]] = [] + self.proofs_finished: list[tuple[Chain, VDFInfo, VDFProof, int]] = [] # Data to send at vdf_client initialization. - self.overflow_blocks: List[timelord_protocol.NewUnfinishedBlockTimelord] = [] + self.overflow_blocks: list[timelord_protocol.NewUnfinishedBlockTimelord] = [] # Incremented each time `reset_chains` has been called. # Used to label proofs in `finished_proofs` and to only filter proofs corresponding to the most recent state. self.num_resets: int = 0 - self.process_communication_tasks: List[asyncio.Task[None]] = [] + self.process_communication_tasks: list[asyncio.Task[None]] = [] self.main_loop: Optional[asyncio.Task[None]] = None self.vdf_server: Optional[asyncio.base_events.Server] = None self._shut_down = False - self.vdf_failures: List[Tuple[Chain, Optional[int]]] = [] + self.vdf_failures: list[tuple[Chain, Optional[int]]] = [] self.vdf_failures_count: int = 0 self.vdf_failure_time: float = 0 self.total_unfinished: int = 0 @@ -134,7 +135,7 @@ def __init__(self, root_path: Path, config: Dict[str, Any], constants: Consensus # Support backwards compatibility for the old `config.yaml` that has field `sanitizer_mode`. 
if not self.bluebox_mode: self.bluebox_mode = self.config.get("sanitizer_mode", False) - self.pending_bluebox_info: List[Tuple[float, timelord_protocol.RequestCompactProofOfTime]] = [] + self.pending_bluebox_info: list[tuple[float, timelord_protocol.RequestCompactProofOfTime]] = [] self.last_active_time = time.time() self.max_allowed_inactivity_time = 60 self.bluebox_pool: Optional[ThreadPoolExecutor] = None @@ -175,7 +176,7 @@ async def manage(self) -> AsyncIterator[None]: if self.bluebox_pool is not None: self.bluebox_pool.shutdown() - def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]: + def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]: return default_get_connections(server=self.server, request_node_type=request_node_type) async def on_connect(self, connection: WSChiaConnection) -> None: @@ -189,7 +190,7 @@ def get_vdf_server_port(self) -> Optional[uint16]: def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None: self.state_changed_callback = callback - def state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None: + def state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None: if self.state_changed_callback is not None: self.state_changed_callback(change, change_data) @@ -431,7 +432,7 @@ async def _submit_iterations(self) -> None: writer.write(iter_str.encode()) await writer.drain() - def _clear_proof_list(self, iters: uint64) -> List[Tuple[Chain, VDFInfo, VDFProof, int]]: + def _clear_proof_list(self, iters: uint64) -> list[tuple[Chain, VDFInfo, VDFProof, int]]: return [ (chain, info, proof, label) for chain, info, proof, label in self.proofs_finished diff --git a/chia/timelord/timelord_launcher.py b/chia/timelord/timelord_launcher.py index 1f2118681dff..89df71d5748a 100644 --- a/chia/timelord/timelord_launcher.py +++ b/chia/timelord/timelord_launcher.py @@ -7,10 +7,11 @@ import signal import sys import time +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from dataclasses import dataclass, field from types import FrameType -from typing import Any, AsyncIterator, Dict, List, Optional +from typing import Any, Optional from chia.server.signal_handlers import SignalHandlers from chia.util.chia_logging import initialize_logging @@ -26,7 +27,7 @@ class VDFClientProcessMgr: lock: asyncio.Lock = field(default_factory=asyncio.Lock) stopped: bool = False - active_processes: List[asyncio.subprocess.Process] = field(default_factory=list) + active_processes: list[asyncio.subprocess.Process] = field(default_factory=list) async def remove_process(self, proc: asyncio.subprocess.Process) -> None: async with self.lock: @@ -131,7 +132,7 @@ async def spawn_process( await proc.communicate() -async def spawn_all_processes(config: Dict, net_config: Dict, process_mgr: VDFClientProcessMgr): +async def spawn_all_processes(config: dict, net_config: dict, process_mgr: VDFClientProcessMgr): await asyncio.sleep(5) hostname = net_config["self_hostname"] if "host" not in config else config["host"] port = config["port"] @@ -152,7 +153,7 @@ async def spawn_all_processes(config: Dict, net_config: Dict, process_mgr: VDFCl await asyncio.gather(*awaitables) -async def async_main(config: Dict[str, Any], net_config: Dict[str, Any]) -> None: +async def async_main(config: dict[str, Any], net_config: dict[str, Any]) -> None: process_mgr = VDFClientProcessMgr() async def stop( diff --git a/chia/timelord/timelord_state.py 
b/chia/timelord/timelord_state.py index da5c85ca2ae4..e6e35c11cc07 100644 --- a/chia/timelord/timelord_state.py +++ b/chia/timelord/timelord_state.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import List, Optional, Tuple, Union +from typing import Optional, Union from chia.consensus.constants import ConsensusConstants from chia.protocols import timelord_protocol @@ -44,7 +44,7 @@ def __init__(self, constants: ConsensusConstants): self.last_peak_challenge: bytes32 = constants.GENESIS_CHALLENGE self.difficulty: uint64 = constants.DIFFICULTY_STARTING self.sub_slot_iters: uint64 = constants.SUB_SLOT_ITERS_STARTING - self.reward_challenge_cache: List[Tuple[bytes32, uint128]] = [(constants.GENESIS_CHALLENGE, uint128(0))] + self.reward_challenge_cache: list[tuple[bytes32, uint128]] = [(constants.GENESIS_CHALLENGE, uint128(0))] self.new_epoch = False self.passed_ses_height_but_not_yet_included = False self.infused_ses = False diff --git a/chia/types/block_protocol.py b/chia/types/block_protocol.py index 1febaedc0319..5eb6a843548c 100644 --- a/chia/types/block_protocol.py +++ b/chia/types/block_protocol.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from typing_extensions import Protocol @@ -17,4 +17,4 @@ def prev_header_hash(self) -> bytes32: ... def transactions_generator(self) -> Optional[SerializedProgram]: ... @property - def transactions_generator_ref_list(self) -> List[uint32]: ... + def transactions_generator_ref_list(self) -> list[uint32]: ... diff --git a/chia/types/blockchain_format/coin.py b/chia/types/blockchain_format/coin.py index 3f5ad914e910..07963800f005 100644 --- a/chia/types/blockchain_format/coin.py +++ b/chia/types/blockchain_format/coin.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Union +from typing import Union from chia_rs import Coin @@ -11,11 +11,11 @@ __all__ = ["Coin", "coin_as_list", "hash_coin_ids"] -def coin_as_list(c: Coin) -> List[Union[bytes32, uint64]]: +def coin_as_list(c: Coin) -> list[Union[bytes32, uint64]]: return [c.parent_coin_info, c.puzzle_hash, uint64(c.amount)] -def hash_coin_ids(coin_ids: List[bytes32]) -> bytes32: +def hash_coin_ids(coin_ids: list[bytes32]) -> bytes32: if len(coin_ids) == 1: return std_hash(coin_ids[0]) diff --git a/chia/types/blockchain_format/program.py b/chia/types/blockchain_format/program.py index e1df72fdc1b0..ad64e902ff9d 100644 --- a/chia/types/blockchain_format/program.py +++ b/chia/types/blockchain_format/program.py @@ -1,7 +1,7 @@ from __future__ import annotations import io -from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Set, Tuple, Type, TypeVar +from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar from chia_rs import ALLOW_BACKREFS, MEMPOOL_MODE, run_chia_program, tree_hash from clvm.casts import int_from_bytes @@ -30,14 +30,14 @@ class Program(SExp): """ @classmethod - def parse(cls: Type[T_Program], f) -> T_Program: + def parse(cls: type[T_Program], f) -> T_Program: return sexp_from_stream(f, cls.to) def stream(self, f): sexp_to_stream(self, f) @classmethod - def from_bytes(cls: Type[T_Program], blob: bytes) -> T_Program: + def from_bytes(cls: type[T_Program], blob: bytes) -> T_Program: # this runs the program "1", which just returns the first argument. # the first argument is the buffer we want to parse. 
This effectively # leverages the rust parser and LazyNode, making it a lot faster to @@ -51,11 +51,11 @@ def from_bytes(cls: Type[T_Program], blob: bytes) -> T_Program: return cls.to(ret) @classmethod - def fromhex(cls: Type[T_Program], hexstr: str) -> T_Program: + def fromhex(cls: type[T_Program], hexstr: str) -> T_Program: return cls.from_bytes(hexstr_to_bytes(hexstr)) @classmethod - def from_json_dict(cls: Type[Program], json_dict: Any) -> Program: + def from_json_dict(cls: type[Program], json_dict: Any) -> Program: if isinstance(json_dict, cls): return json_dict item = hexstr_to_bytes(json_dict) @@ -124,12 +124,12 @@ def get_tree_hash_precalc(self, *args: bytes32) -> bytes32: def get_tree_hash(self) -> bytes32: return bytes32(tree_hash(bytes(self))) - def _run(self, max_cost: int, flags: int, args: Any) -> Tuple[int, Program]: + def _run(self, max_cost: int, flags: int, args: Any) -> tuple[int, Program]: prog_args = Program.to(args) cost, r = run_chia_program(self.as_bin(), prog_args.as_bin(), max_cost, flags) return cost, Program.to(r) - def run_with_cost(self, max_cost: int, args: Any, flags=DEFAULT_FLAGS) -> Tuple[int, Program]: + def run_with_cost(self, max_cost: int, args: Any, flags=DEFAULT_FLAGS) -> tuple[int, Program]: # when running puzzles in the wallet, default to enabling all soft-forks # as well as enabling mempool-mode (i.e. strict mode) return self._run(max_cost, flags, args) @@ -138,7 +138,7 @@ def run(self, args: Any, max_cost=INFINITE_COST, flags=DEFAULT_FLAGS) -> Program cost, r = self._run(max_cost, flags, args) return r - def run_with_flags(self, max_cost: int, flags: int, args: Any) -> Tuple[int, Program]: + def run_with_flags(self, max_cost: int, flags: int, args: Any) -> tuple[int, Program]: return self._run(max_cost, flags, args) # Replicates the curry function from clvm_tools, taking advantage of *args @@ -163,7 +163,7 @@ def curry(self, *args) -> Program: fixed_args = [4, (1, arg), fixed_args] return Program.to([2, (1, self), fixed_args]) - def uncurry(self) -> Tuple[Program, Program]: + def uncurry(self) -> tuple[Program, Program]: def match(o: CLVMStorage, expected: bytes) -> None: if o.atom != expected: raise ValueError(f"expected: {expected.hex()}") @@ -216,7 +216,7 @@ def __deepcopy__(self, memo): EvalError = EvalError -def _tree_hash(node: SExp, precalculated: Set[bytes32]) -> bytes32: +def _tree_hash(node: SExp, precalculated: set[bytes32]) -> bytes32: """ Hash values in `precalculated` are presumed to have been hashed already. 
""" @@ -252,7 +252,7 @@ def _sexp_replace(sexp: T_CLVMStorage, to_sexp: Callable[[Any], T_Program], **kw # Now split `kwargs` into two groups: those # that start with `f` and those that start with `r` - args_by_prefix: Dict[str, Dict[str, Any]] = {} + args_by_prefix: dict[str, dict[str, Any]] = {} for k, v in kwargs.items(): c = k[0] if c not in "fr": diff --git a/chia/types/blockchain_format/tree_hash.py b/chia/types/blockchain_format/tree_hash.py index 120462a6bc9e..60c57d7ac94a 100644 --- a/chia/types/blockchain_format/tree_hash.py +++ b/chia/types/blockchain_format/tree_hash.py @@ -8,7 +8,7 @@ from __future__ import annotations -from typing import Callable, List, Optional, Set, Union +from typing import Callable, Optional, Union from clvm.CLVMObject import CLVMStorage from clvm.SExp import SExp @@ -17,12 +17,12 @@ from chia.util.hash import std_hash ValueType = Union[bytes, CLVMStorage] -ValueStackType = List[ValueType] -Op = Callable[[ValueStackType, "OpStackType", Set[bytes32]], None] -OpStackType = List[Op] +ValueStackType = list[ValueType] +Op = Callable[[ValueStackType, "OpStackType", set[bytes32]], None] +OpStackType = list[Op] -def sha256_treehash(sexp: CLVMStorage, precalculated: Optional[Set[bytes32]] = None) -> bytes32: +def sha256_treehash(sexp: CLVMStorage, precalculated: Optional[set[bytes32]] = None) -> bytes32: """ Hash values in `precalculated` are presumed to have been hashed already. """ @@ -30,7 +30,7 @@ def sha256_treehash(sexp: CLVMStorage, precalculated: Optional[Set[bytes32]] = N if precalculated is None: precalculated = set() - def handle_sexp(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: Set[bytes32]) -> None: + def handle_sexp(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: set[bytes32]) -> None: # just trusting it is right, otherwise we get an attribute error sexp: SExp = sexp_stack.pop() # type: ignore[assignment] if sexp.pair: @@ -50,20 +50,20 @@ def handle_sexp(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated r = std_hash(b"\1" + atom) sexp_stack.append(r) - def handle_pair(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: Set[bytes32]) -> None: + def handle_pair(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: set[bytes32]) -> None: # just trusting it is right, otherwise we get a type error p0: bytes = sexp_stack.pop() # type: ignore[assignment] p1: bytes = sexp_stack.pop() # type: ignore[assignment] sexp_stack.append(std_hash(b"\2" + p0 + p1)) - def roll(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: Set[bytes32]) -> None: + def roll(sexp_stack: ValueStackType, op_stack: OpStackType, precalculated: set[bytes32]) -> None: p0 = sexp_stack.pop() p1 = sexp_stack.pop() sexp_stack.append(p0) sexp_stack.append(p1) sexp_stack: ValueStackType = [sexp] - op_stack: List[Op] = [handle_sexp] + op_stack: list[Op] = [handle_sexp] while len(op_stack) > 0: op = op_stack.pop() op(sexp_stack, op_stack, precalculated) diff --git a/chia/types/coin_spend.py b/chia/types/coin_spend.py index 03f1c39d1592..97c0fd28b842 100644 --- a/chia/types/coin_spend.py +++ b/chia/types/coin_spend.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Dict, List, Tuple, Union +from typing import Any, Union import chia_rs @@ -46,7 +46,7 @@ def compute_additions_with_cost( cs: CoinSpend, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM, -) -> Tuple[List[Coin], int]: +) -> tuple[list[Coin], int]: """ Run the puzzle in the 
specified CoinSpend and return the cost and list of coins created by the puzzle, i.e. additions. If the cost (CLVM- and @@ -56,7 +56,7 @@ def compute_additions_with_cost( measured at the block generator level. """ parent_id = cs.coin.name() - ret: List[Coin] = [] + ret: list[Coin] = [] cost, r = cs.puzzle_reveal.run_with_cost(max_cost, cs.solution) for cond in Program.to(r).as_iter(): if cost > max_cost: @@ -85,7 +85,7 @@ def compute_additions_with_cost( return ret, cost -def compute_additions(cs: CoinSpend, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) -> List[Coin]: +def compute_additions(cs: CoinSpend, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) -> list[Coin]: return compute_additions_with_cost(cs, max_cost=max_cost)[0] @@ -99,10 +99,10 @@ class SpendInfo(Streamable): @dataclass(frozen=True) class CoinSpendWithConditions: coin_spend: CoinSpend - conditions: List[ConditionWithArgs] + conditions: list[ConditionWithArgs] @staticmethod - def from_json_dict(dict: Dict[str, Any]) -> CoinSpendWithConditions: + def from_json_dict(dict: dict[str, Any]) -> CoinSpendWithConditions: return CoinSpendWithConditions( CoinSpend.from_json_dict(dict["coin_spend"]), [ diff --git a/chia/types/condition_with_args.py b/chia/types/condition_with_args.py index 0c4073a5db3e..546aa3217504 100644 --- a/chia/types/condition_with_args.py +++ b/chia/types/condition_with_args.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.types.condition_opcodes import ConditionOpcode @@ -14,4 +13,4 @@ class ConditionWithArgs: """ opcode: ConditionOpcode - vars: List[bytes] + vars: list[bytes] diff --git a/chia/types/eligible_coin_spends.py b/chia/types/eligible_coin_spends.py index b709e69c8c5d..b14b1612adb2 100644 --- a/chia/types/eligible_coin_spends.py +++ b/chia/types/eligible_coin_spends.py @@ -1,7 +1,8 @@ from __future__ import annotations import dataclasses -from typing import Awaitable, Callable, Dict, List, Optional, Tuple +from collections.abc import Awaitable +from typing import Callable, Optional from chia_rs import fast_forward_singleton, get_conditions_from_spendbundle @@ -21,7 +22,7 @@ @dataclasses.dataclass(frozen=True) class EligibilityAndAdditions: is_eligible_for_dedup: bool - spend_additions: List[Coin] + spend_additions: list[Coin] is_eligible_for_ff: bool @@ -50,7 +51,7 @@ class UnspentLineageInfo: def set_next_singleton_version( - current_singleton: Coin, singleton_additions: List[Coin], fast_forward_spends: Dict[bytes32, UnspentLineageInfo] + current_singleton: Coin, singleton_additions: list[Coin], fast_forward_spends: dict[bytes32, UnspentLineageInfo] ) -> None: """ Finds the next version of the singleton among its additions and updates the @@ -83,8 +84,8 @@ def set_next_singleton_version( def perform_the_fast_forward( unspent_lineage_info: UnspentLineageInfo, spend_data: BundleCoinSpend, - fast_forward_spends: Dict[bytes32, UnspentLineageInfo], -) -> Tuple[CoinSpend, List[Coin]]: + fast_forward_spends: dict[bytes32, UnspentLineageInfo], +) -> tuple[CoinSpend, list[Coin]]: """ Performs a singleton fast forward, including the updating of all previous additions to point to the most recent version, and updates the fast forward @@ -141,12 +142,12 @@ def perform_the_fast_forward( @dataclasses.dataclass(frozen=True) class EligibleCoinSpends: - deduplication_spends: Dict[bytes32, DedupCoinSpend] = dataclasses.field(default_factory=dict) - fast_forward_spends: Dict[bytes32, UnspentLineageInfo] = 
dataclasses.field(default_factory=dict) + deduplication_spends: dict[bytes32, DedupCoinSpend] = dataclasses.field(default_factory=dict) + fast_forward_spends: dict[bytes32, UnspentLineageInfo] = dataclasses.field(default_factory=dict) def get_deduplication_info( - self, *, bundle_coin_spends: Dict[bytes32, BundleCoinSpend], max_cost: int - ) -> Tuple[List[CoinSpend], uint64, List[Coin]]: + self, *, bundle_coin_spends: dict[bytes32, BundleCoinSpend], max_cost: int + ) -> tuple[list[CoinSpend], uint64, list[Coin]]: """ Checks all coin spends of a mempool item for deduplication eligibility and provides the caller with the necessary information that allows it to perform @@ -167,10 +168,10 @@ def get_deduplication_info( one we're already deduplicating on. """ cost_saving = 0 - unique_coin_spends: List[CoinSpend] = [] - unique_additions: List[Coin] = [] + unique_coin_spends: list[CoinSpend] = [] + unique_additions: list[Coin] = [] # Map of coin ID to deduplication information - new_dedup_spends: Dict[bytes32, DedupCoinSpend] = {} + new_dedup_spends: dict[bytes32, DedupCoinSpend] = {} # See if this item has coin spends that are eligible for deduplication for coin_id, spend_data in bundle_coin_spends.items(): if not spend_data.eligible_for_dedup: diff --git a/chia/types/generator_types.py b/chia/types/generator_types.py index 527cc8354af5..c96419fea5a1 100644 --- a/chia/types/generator_types.py +++ b/chia/types/generator_types.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.types.blockchain_format.serialized_program import SerializedProgram from chia.util.streamable import Streamable, streamable @@ -11,4 +10,4 @@ @dataclass(frozen=True) class BlockGenerator(Streamable): program: SerializedProgram - generator_refs: List[bytes] + generator_refs: list[bytes] diff --git a/chia/types/internal_mempool_item.py b/chia/types/internal_mempool_item.py index 922bc8aefe41..76e0ec081fb6 100644 --- a/chia/types/internal_mempool_item.py +++ b/chia/types/internal_mempool_item.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.mempool_item import BundleCoinSpend @@ -16,4 +15,4 @@ class InternalMempoolItem: conds: SpendBundleConditions height_added_to_mempool: uint32 # Map of coin ID to coin spend data between the bundle and its NPCResult - bundle_coin_spends: Dict[bytes32, BundleCoinSpend] + bundle_coin_spends: dict[bytes32, BundleCoinSpend] diff --git a/chia/types/mempool_item.py b/chia/types/mempool_item.py index 5a369458e696..cefa006fc674 100644 --- a/chia/types/mempool_item.py +++ b/chia/types/mempool_item.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -17,7 +17,7 @@ class BundleCoinSpend: coin_spend: CoinSpend eligible_for_dedup: bool eligible_for_fast_forward: bool - additions: List[Coin] + additions: list[Coin] # cost on the specific solution in this item cost: Optional[uint64] = None @@ -40,7 +40,7 @@ class MempoolItem: # Map of coin ID to coin spend data between the bundle and its # SpendBundleConditions - bundle_coin_spends: Dict[bytes32, BundleCoinSpend] = field(default_factory=dict) + bundle_coin_spends: dict[bytes32, BundleCoinSpend] = 
field(default_factory=dict) def __lt__(self, other: MempoolItem) -> bool: return self.fee_per_cost < other.fee_per_cost @@ -61,8 +61,8 @@ def cost(self) -> uint64: return uint64(0 if self.conds is None else self.conds.cost) @property - def additions(self) -> List[Coin]: - additions: List[Coin] = [] + def additions(self) -> list[Coin]: + additions: list[Coin] = [] for spend in self.conds.spends: for puzzle_hash, amount, _ in spend.create_coin: coin = Coin(spend.coin_id, puzzle_hash, uint64(amount)) @@ -70,10 +70,10 @@ def additions(self) -> List[Coin]: return additions @property - def removals(self) -> List[Coin]: + def removals(self) -> list[Coin]: return self.spend_bundle.removals() - def to_json_dict(self) -> Dict[str, Any]: + def to_json_dict(self) -> dict[str, Any]: return { "spend_bundle": recurse_jsonify(self.spend_bundle), "fee": recurse_jsonify(self.fee), diff --git a/chia/types/mempool_submission_status.py b/chia/types/mempool_submission_status.py index c72a10f79b19..9f0e68775d9f 100644 --- a/chia/types/mempool_submission_status.py +++ b/chia/types/mempool_submission_status.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict, Optional, Union +from typing import Optional, Union from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.util.ints import uint8 @@ -21,7 +21,7 @@ class MempoolSubmissionStatus(Streamable): inclusion_status: uint8 # MempoolInclusionStatus error_msg: Optional[str] - def to_json_dict_convenience(self) -> Dict[str, Union[str, MempoolInclusionStatus, Optional[str]]]: + def to_json_dict_convenience(self) -> dict[str, Union[str, MempoolInclusionStatus, Optional[str]]]: formatted = self.to_json_dict() formatted["inclusion_status"] = MempoolInclusionStatus(self.inclusion_status).name return formatted diff --git a/chia/types/transaction_queue_entry.py b/chia/types/transaction_queue_entry.py index 4089f9d3f45b..8ccf3ac6c74f 100644 --- a/chia/types/transaction_queue_entry.py +++ b/chia/types/transaction_queue_entry.py @@ -3,7 +3,7 @@ import asyncio import dataclasses from dataclasses import dataclass, field -from typing import ClassVar, Generic, Optional, Tuple, TypeVar, Union +from typing import ClassVar, Generic, Optional, TypeVar, Union from chia.server.ws_connection import WSChiaConnection from chia.types.blockchain_format.sized_bytes import bytes32 @@ -49,7 +49,7 @@ class TransactionQueueEntry: spend_name: bytes32 peer: Optional[WSChiaConnection] = field(compare=False) test: bool = field(compare=False) - done: ValuedEvent[Tuple[MempoolInclusionStatus, Optional[Err]]] = field( + done: ValuedEvent[tuple[MempoolInclusionStatus, Optional[Err]]] = field( default_factory=ValuedEvent, compare=False, ) diff --git a/chia/types/unfinished_header_block.py b/chia/types/unfinished_header_block.py index 1cde48147c0a..6f6054322431 100644 --- a/chia/types/unfinished_header_block.py +++ b/chia/types/unfinished_header_block.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from chia.types.blockchain_format.foliage import Foliage, FoliageTransactionBlock from chia.types.blockchain_format.reward_chain_block import RewardChainBlockUnfinished @@ -16,7 +16,7 @@ @dataclass(frozen=True) class UnfinishedHeaderBlock(Streamable): # Same as a FullBlock but without TransactionInfo and Generator, used by light clients - finished_sub_slots: List[EndOfSubSlotBundle] # If first sb + finished_sub_slots: 
list[EndOfSubSlotBundle] # If first sb reward_chain_block: RewardChainBlockUnfinished # Reward chain trunk data challenge_chain_sp_proof: Optional[VDFProof] # If not first sp in sub-slot reward_chain_sp_proof: Optional[VDFProof] # If not first sp in sub-slot diff --git a/chia/types/weight_proof.py b/chia/types/weight_proof.py index 6c2b17e7aee0..5363087e8405 100644 --- a/chia/types/weight_proof.py +++ b/chia/types/weight_proof.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List import chia_rs @@ -32,19 +31,19 @@ @dataclass(frozen=True) # this is used only for serialization to database class RecentChainData(Streamable): - recent_chain_data: List[HeaderBlock] + recent_chain_data: list[HeaderBlock] @streamable @dataclass(frozen=True) class ProofBlockHeader(Streamable): - finished_sub_slots: List[EndOfSubSlotBundle] + finished_sub_slots: list[EndOfSubSlotBundle] reward_chain_block: RewardChainBlock @streamable @dataclass(frozen=True) class WeightProof(Streamable): - sub_epochs: List[SubEpochData] - sub_epoch_segments: List[SubEpochChallengeSegment] # sampled sub epoch - recent_chain_data: List[HeaderBlock] + sub_epochs: list[SubEpochData] + sub_epoch_segments: list[SubEpochChallengeSegment] # sampled sub epoch + recent_chain_data: list[HeaderBlock] diff --git a/chia/util/action_scope.py b/chia/util/action_scope.py index 03172115d06f..1d1bf8d0ffcd 100644 --- a/chia/util/action_scope.py +++ b/chia/util/action_scope.py @@ -1,8 +1,9 @@ from __future__ import annotations import contextlib +from collections.abc import AsyncIterator, Awaitable from dataclasses import dataclass, field -from typing import AsyncIterator, Awaitable, Callable, Generic, Optional, Protocol, Type, TypeVar, final +from typing import Callable, Generic, Optional, Protocol, TypeVar, final import aiosqlite @@ -21,7 +22,7 @@ async def use(self) -> AsyncIterator[None]: # pragma: no cover # yield included to make this a generator as expected by @contextlib.asynccontextmanager yield - async def get_resource(self, resource_type: Type[_T_SideEffects]) -> _T_SideEffects: ... + async def get_resource(self, resource_type: type[_T_SideEffects]) -> _T_SideEffects: ... async def save_resource(self, resource: SideEffects) -> None: ... @@ -62,7 +63,7 @@ async def use(self) -> AsyncIterator[None]: finally: self._active_writer = None - async def get_resource(self, resource_type: Type[_T_SideEffects]) -> _T_SideEffects: + async def get_resource(self, resource_type: type[_T_SideEffects]) -> _T_SideEffects: row = await execute_fetchone(self.get_active_writer(), "SELECT total FROM side_effects") assert row is not None side_effects = resource_type.from_bytes(row[0]) @@ -80,7 +81,7 @@ class SideEffects(Protocol): def __bytes__(self) -> bytes: ... @classmethod - def from_bytes(cls: Type[_T_SideEffects], blob: bytes) -> _T_SideEffects: ... + def from_bytes(cls: type[_T_SideEffects], blob: bytes) -> _T_SideEffects: ... 
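The SideEffects protocol above pairs __bytes__ with a from_bytes classmethod, typed with the builtin type[_T] form that this patch migrates to. A minimal conforming class as an illustration (the Counter name and its encoding are hypothetical, not part of this diff):

from __future__ import annotations

from typing import TypeVar

_T = TypeVar("_T", bound="Counter")

class Counter:
    def __init__(self, total: int = 0) -> None:
        self.total = total

    def __bytes__(self) -> bytes:
        # Eight-byte big-endian encoding of the running total.
        return self.total.to_bytes(8, "big")

    @classmethod
    def from_bytes(cls: type[_T], blob: bytes) -> _T:
        # Inverse of __bytes__, returning whichever subclass it is called on.
        return cls(int.from_bytes(blob, "big"))

assert Counter.from_bytes(bytes(Counter(7))).total == 7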
_T_SideEffects = TypeVar("_T_SideEffects", bound=SideEffects) @@ -100,7 +101,7 @@ class ActionScope(Generic[_T_SideEffects, _T_Config]): """ _resource_manager: ResourceManager - _side_effects_format: Type[_T_SideEffects] + _side_effects_format: type[_T_SideEffects] _config: _T_Config # An object not intended to be mutated during the lifetime of the scope _callback: Optional[Callable[[StateInterface[_T_SideEffects]], Awaitable[None]]] = None _final_side_effects: Optional[_T_SideEffects] = field(init=False, default=None) @@ -123,11 +124,11 @@ def config(self) -> _T_Config: @contextlib.asynccontextmanager async def new_scope( cls, - side_effects_format: Type[_T_SideEffects], + side_effects_format: type[_T_SideEffects], # I want a default here in case a use case doesn't want to take advantage of the config but no default seems to # satisfy the type hint _T_Config so we'll just ignore this. config: _T_Config = object(), # type: ignore[assignment] - resource_manager_backend: Type[ResourceManager] = SQLiteResourceManager, + resource_manager_backend: type[ResourceManager] = SQLiteResourceManager, ) -> AsyncIterator[ActionScope[_T_SideEffects, _T_Config]]: async with resource_manager_backend.managed(side_effects_format()) as resource_manager: self = cls(_resource_manager=resource_manager, _side_effects_format=side_effects_format, _config=config) diff --git a/chia/util/api_decorators.py b/chia/util/api_decorators.py index a4cda2866386..3195da3db130 100644 --- a/chia/util/api_decorators.py +++ b/chia/util/api_decorators.py @@ -3,7 +3,7 @@ import functools import logging from dataclasses import dataclass, field -from typing import Callable, List, Optional, Type, TypeVar, Union, get_type_hints +from typing import Callable, Optional, TypeVar, Union, get_type_hints from typing_extensions import Concatenate, ParamSpec @@ -24,11 +24,11 @@ @dataclass class ApiMetadata: request_type: ProtocolMessageTypes - message_class: Type[Streamable] + message_class: type[Streamable] peer_required: bool = False bytes_required: bool = False execute_task: bool = False - reply_types: List[ProtocolMessageTypes] = field(default_factory=list) + reply_types: list[ProtocolMessageTypes] = field(default_factory=list) def get_metadata(function: Callable[..., object]) -> Optional[ApiMetadata]: @@ -45,9 +45,9 @@ def api_request( peer_required: bool = False, bytes_required: bool = False, execute_task: bool = False, - reply_types: Optional[List[ProtocolMessageTypes]] = None, + reply_types: Optional[list[ProtocolMessageTypes]] = None, ) -> Callable[[Callable[Concatenate[Self, S, P], R]], Callable[Concatenate[Self, Union[bytes, S], P], R]]: - non_optional_reply_types: List[ProtocolMessageTypes] + non_optional_reply_types: list[ProtocolMessageTypes] if reply_types is None: non_optional_reply_types = [] else: diff --git a/chia/util/async_pool.py b/chia/util/async_pool.py index 905982029a21..1621a7b284ac 100644 --- a/chia/util/async_pool.py +++ b/chia/util/async_pool.py @@ -6,7 +6,8 @@ import itertools import logging import traceback -from typing import AsyncIterator, Dict, Generic, Iterator, Optional, Protocol, TypeVar, final +from collections.abc import AsyncIterator, Iterator +from typing import Generic, Optional, Protocol, TypeVar, final import anyio @@ -128,7 +129,7 @@ class AsyncPool: log: logging.Logger worker_async_callable: WorkerCallable _target_worker_count: int - _workers: Dict[asyncio.Task[object], int] = dataclasses.field(init=False, default_factory=dict) + _workers: dict[asyncio.Task[object], int] = 
dataclasses.field(init=False, default_factory=dict) _worker_id_counter: Iterator[int] = dataclasses.field(init=False, default_factory=itertools.count) _started: asyncio.Event = dataclasses.field(default_factory=asyncio.Event) _single_use_used: bool = False diff --git a/chia/util/augmented_chain.py b/chia/util/augmented_chain.py index 519313e759ac..fd4fde1fbdb7 100644 --- a/chia/util/augmented_chain.py +++ b/chia/util/augmented_chain.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlocksProtocol @@ -26,8 +26,8 @@ class AugmentedBlockchain: _protocol_check: ClassVar[BlocksProtocol] = cast("AugmentedBlockchain", None) _underlying: BlocksProtocol - _extra_blocks: Dict[bytes32, Tuple[FullBlock, BlockRecord]] - _height_to_hash: Dict[uint32, bytes32] + _extra_blocks: dict[bytes32, tuple[FullBlock, BlockRecord]] + _height_to_hash: dict[uint32, bytes32] def __init__(self, underlying: BlocksProtocol) -> None: self._underlying = underlying @@ -46,14 +46,14 @@ def add_extra_block(self, block: FullBlock, block_record: BlockRecord) -> None: self._height_to_hash[block_record.height] = block_record.header_hash # BlocksProtocol - async def lookup_block_generators(self, header_hash: bytes32, generator_refs: Set[uint32]) -> Dict[uint32, bytes]: + async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: - generators: Dict[uint32, bytes] = {} + generators: dict[uint32, bytes] = {} # traverse the additional blocks (if any) and resolve heights into # generators to_remove = [] - curr: Optional[Tuple[FullBlock, BlockRecord]] = self._extra_blocks.get(header_hash) + curr: Optional[tuple[FullBlock, BlockRecord]] = self._extra_blocks.get(header_hash) while curr is not None: b = curr[0] if b.height in generator_refs: @@ -119,8 +119,8 @@ def contains_block(self, header_hash: bytes32) -> bool: def contains_height(self, height: uint32) -> bool: return (height in self._height_to_hash) or self._underlying.contains_height(height) - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: - ret: List[bytes32] = [] + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: + ret: list[bytes32] = [] for hh in header_hashes: b = self._extra_blocks.get(hh) if b is not None: diff --git a/chia/util/batches.py b/chia/util/batches.py index 264a145066b2..30a26b935561 100644 --- a/chia/util/batches.py +++ b/chia/util/batches.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Collection, Iterator from dataclasses import dataclass -from typing import Collection, Generic, Iterator, List, TypeVar +from typing import Generic, TypeVar T = TypeVar("T") @@ -9,7 +10,7 @@ @dataclass(frozen=True) class Batch(Generic[T]): remaining: int - entries: List[T] + entries: list[T] def to_batches(to_split: Collection[T], batch_size: int) -> Iterator[Batch[T]]: diff --git a/chia/util/bech32m.py b/chia/util/bech32m.py index 39873a50bbfb..b35576c67d0f 100644 --- a/chia/util/bech32m.py +++ b/chia/util/bech32m.py @@ -20,7 +20,8 @@ from __future__ import annotations -from typing import Iterable, List, Optional, Tuple +from collections.abc import Iterable +from typing import Optional from chia.types.blockchain_format.sized_bytes import bytes32 @@ -32,7 +33,7 @@ CHARSET = 
"qpzry9x8gf2tvdw0s3jn54khce6mua7l" -def bech32_polymod(values: List[int]) -> int: +def bech32_polymod(values: list[int]) -> int: """Internal function that computes the Bech32 checksum.""" generator = [0x3B6A57B2, 0x26508E6D, 0x1EA119FA, 0x3D4233DD, 0x2A1462B3] chk = 1 @@ -44,7 +45,7 @@ def bech32_polymod(values: List[int]) -> int: return chk -def bech32_hrp_expand(hrp: str) -> List[int]: +def bech32_hrp_expand(hrp: str) -> list[int]: """Expand the HRP into values for checksum computation.""" return [ord(x) >> 5 for x in hrp] + [0] + [ord(x) & 31 for x in hrp] @@ -52,23 +53,23 @@ def bech32_hrp_expand(hrp: str) -> List[int]: M = 0x2BC830A3 -def bech32_verify_checksum(hrp: str, data: List[int]) -> bool: +def bech32_verify_checksum(hrp: str, data: list[int]) -> bool: return bech32_polymod(bech32_hrp_expand(hrp) + data) == M -def bech32_create_checksum(hrp: str, data: List[int]) -> List[int]: +def bech32_create_checksum(hrp: str, data: list[int]) -> list[int]: values = bech32_hrp_expand(hrp) + data polymod = bech32_polymod(values + [0, 0, 0, 0, 0, 0]) ^ M return [(polymod >> 5 * (5 - i)) & 31 for i in range(6)] -def bech32_encode(hrp: str, data: List[int]) -> str: +def bech32_encode(hrp: str, data: list[int]) -> str: """Compute a Bech32 string given HRP and data values.""" combined = data + bech32_create_checksum(hrp, data) return hrp + "1" + "".join([CHARSET[d] for d in combined]) -def bech32_decode(bech: str, max_length: int = 90) -> Tuple[Optional[str], Optional[List[int]]]: +def bech32_decode(bech: str, max_length: int = 90) -> tuple[Optional[str], Optional[list[int]]]: """Validate a Bech32 string, and determine HRP and data.""" bech = bech.strip() if (any(ord(x) < 33 or ord(x) > 126 for x in bech)) or (bech.lower() != bech and bech.upper() != bech): @@ -86,7 +87,7 @@ def bech32_decode(bech: str, max_length: int = 90) -> Tuple[Optional[str], Optio return hrp, data[:-6] -def convertbits(data: Iterable[int], frombits: int, tobits: int, pad: bool = True) -> List[int]: +def convertbits(data: Iterable[int], frombits: int, tobits: int, pad: bool = True) -> list[int]: """General power-of-2 base conversion.""" acc = 0 bits = 0 diff --git a/chia/util/beta_metrics.py b/chia/util/beta_metrics.py index a1b81ad7c194..56a9d07d79c0 100644 --- a/chia/util/beta_metrics.py +++ b/chia/util/beta_metrics.py @@ -6,7 +6,7 @@ import socket from dataclasses import dataclass from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional import psutil @@ -43,7 +43,7 @@ def log_memory_metrics() -> None: log.debug(f"MEMORY - virtual memory: {psutil.virtual_memory()}, swap: {psutil.swap_memory()}") -def log_disk_metrics(root_path: Path, plot_dirs: List[str]) -> None: +def log_disk_metrics(root_path: Path, plot_dirs: list[str]) -> None: # TODO, Could this spam the logs too much for large farms? Maybe don't log usage of plot dirs and # set perdisk=False rather for psutil.disk_io_counters? Lets try it with the default interval of 15s for now. 
log.debug(f"DISK partitions: {psutil.disk_partitions()}") @@ -57,7 +57,7 @@ def log_disk_metrics(root_path: Path, plot_dirs: List[str]) -> None: log.debug(f"DISK - io counters: {psutil.disk_io_counters(perdisk=True)}") -def log_port_states(config: Dict[str, Any]) -> None: +def log_port_states(config: dict[str, Any]) -> None: selected_network = config["selected_network"] full_node_port = config["network_overrides"]["config"][selected_network]["default_full_node_port"] test_socket_ipv4 = socket.socket(socket.AF_INET, socket.SOCK_STREAM) diff --git a/chia/util/block_cache.py b/chia/util/block_cache.py index 10cd673d8125..ed0f7845d3a1 100644 --- a/chia/util/block_cache.py +++ b/chia/util/block_cache.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast from chia.consensus.block_record import BlockRecord from chia.types.blockchain_format.sized_bytes import bytes32 @@ -14,12 +14,12 @@ class BlockCache: _protocol_check: ClassVar[BlockRecordsProtocol] = cast("BlockCache", None) - _block_records: Dict[bytes32, BlockRecord] - _height_to_hash: Dict[uint32, bytes32] + _block_records: dict[bytes32, BlockRecord] + _height_to_hash: dict[uint32, bytes32] def __init__( self, - blocks: Dict[bytes32, BlockRecord], + blocks: dict[bytes32, BlockRecord], ): self._block_records = blocks self._height_to_hash = {block.height: hh for hh, block in blocks.items()} @@ -52,5 +52,5 @@ def contains_height(self, height: uint32) -> bool: def try_block_record(self, header_hash: bytes32) -> Optional[BlockRecord]: return self._block_records.get(header_hash) - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: return [self._block_records[h].prev_hash for h in header_hashes] diff --git a/chia/util/check_fork_next_block.py b/chia/util/check_fork_next_block.py index 8ae6061899a8..920a5b0cf9e7 100644 --- a/chia/util/check_fork_next_block.py +++ b/chia/util/check_fork_next_block.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Awaitable, Callable, List +from collections.abc import Awaitable +from typing import Callable from chia.consensus.blockchain_interface import BlockchainInterface from chia.server.ws_connection import WSChiaConnection @@ -10,7 +11,7 @@ async def check_fork_next_block( blockchain: BlockchainInterface, fork_point_height: uint32, - peers_with_peak: List[WSChiaConnection], + peers_with_peak: list[WSChiaConnection], check_block_future: Callable[[WSChiaConnection, uint32, BlockchainInterface], Awaitable[bool]], ) -> uint32: our_peak_height = blockchain.get_peak_height() diff --git a/chia/util/chia_logging.py b/chia/util/chia_logging.py index 4d12bc4c58ed..3cd525fdf3d1 100644 --- a/chia/util/chia_logging.py +++ b/chia/util/chia_logging.py @@ -4,7 +4,7 @@ import os from logging.handlers import SysLogHandler from pathlib import Path -from typing import Any, Dict, List, Optional, cast +from typing import Any, Optional, cast import colorlog from concurrent_log_handler import ConcurrentRotatingFileHandler @@ -17,7 +17,7 @@ default_log_level = "WARNING" -def get_beta_logging_config() -> Dict[str, Any]: +def get_beta_logging_config() -> dict[str, Any]: return { "log_filename": f"{chia_short_version()}/chia-blockchain/beta.log", "log_level": "DEBUG", @@ -29,7 +29,7 @@ def get_beta_logging_config() -> Dict[str, Any]: def get_file_log_handler( - formatter: 
logging.Formatter, root_path: Path, logging_config: Dict[str, object] + formatter: logging.Formatter, root_path: Path, logging_config: dict[str, object] ) -> ConcurrentRotatingFileHandler: log_path = path_from_root(root_path, str(logging_config.get("log_filename", "log/debug.log"))) log_path.parent.mkdir(parents=True, exist_ok=True) @@ -45,7 +45,7 @@ def get_file_log_handler( def initialize_logging( service_name: str, - logging_config: Dict[str, Any], + logging_config: dict[str, Any], root_path: Path, beta_root_path: Optional[Path] = None, ) -> None: @@ -57,7 +57,7 @@ def initialize_logging( f"%(levelname)-8s %(message)s", datefmt=log_date_format, ) - handlers: List[logging.Handler] = [] + handlers: list[logging.Handler] = [] if logging_config["log_stdout"]: stdout_handler = colorlog.StreamHandler() stdout_handler.setFormatter( @@ -106,7 +106,7 @@ def initialize_logging( logging.getLogger("aiosqlite").setLevel(logging.INFO) # Too much logging on debug level -def initialize_service_logging(service_name: str, config: Dict[str, Any]) -> None: +def initialize_service_logging(service_name: str, config: dict[str, Any]) -> None: logging_root_path = DEFAULT_ROOT_PATH if service_name == "daemon": # TODO: Maybe introduce a separate `daemon` section in the config instead of having `daemon_port`, `logging` diff --git a/chia/util/collection.py b/chia/util/collection.py index 39fc6b6e34ee..80bae054a028 100644 --- a/chia/util/collection.py +++ b/chia/util/collection.py @@ -1,11 +1,9 @@ from __future__ import annotations -from typing import List, Set - # Utility Functions for Collections & Sequences -def find_duplicates(array: List[int]) -> Set[int]: +def find_duplicates(array: list[int]) -> set[int]: seen = set() duplicates = set() diff --git a/chia/util/condition_tools.py b/chia/util/condition_tools.py index 872a93bb928b..cf664652b1ad 100644 --- a/chia/util/condition_tools.py +++ b/chia/util/condition_tools.py @@ -1,7 +1,7 @@ from __future__ import annotations from functools import lru_cache -from typing import Callable, Dict, List, Tuple, Union +from typing import Callable, Union from chia_rs import G1Element from clvm.casts import int_from_bytes, int_to_bytes @@ -33,7 +33,7 @@ def parse_sexp_to_condition(sexp: Program) -> ConditionWithArgs: # since the ConditionWithArgs only has atoms as the args, we can't parse # hints and memos with this function. We just exit the loop if we encounter # a pair instead of an atom - vars: List[bytes] = [] + vars: list[bytes] = [] for arg in Program(first[1]).as_iter(): a = arg.atom if a is None: @@ -47,7 +47,7 @@ def parse_sexp_to_condition(sexp: Program) -> ConditionWithArgs: return ConditionWithArgs(ConditionOpcode(op), vars) -def parse_sexp_to_conditions(sexp: Program) -> List[ConditionWithArgs]: +def parse_sexp_to_conditions(sexp: Program) -> list[ConditionWithArgs]: """ Takes a ChiaLisp sexp (list) and returns the list of ConditionWithArgss Raises an ConsensusError if it fails. 
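parse_sexp_to_condition above accepts only atoms after the opcode and stops at the first pair. A compact analogue in plain Python, over hypothetical pre-parsed atoms rather than the clvm API (illustration only):

def parse_condition(atoms: list[bytes]) -> tuple[int, list[bytes]]:
    # First atom is the one-byte opcode; the remaining atoms are its args.
    if len(atoms) == 0 or len(atoms[0]) != 1:
        raise ValueError("invalid condition")
    return atoms[0][0], atoms[1:]

opcode, args = parse_condition([b"\x33", b"\xab" * 32, b"\x64"])  # 0x33 == CREATE_COIN (51)
assert opcode == 51 and len(args) == 2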
@@ -56,8 +56,8 @@ def parse_sexp_to_conditions(sexp: Program) -> List[ConditionWithArgs]: @lru_cache -def agg_sig_additional_data(agg_sig_data: bytes) -> Dict[ConditionOpcode, bytes]: - ret: Dict[ConditionOpcode, bytes] = {} +def agg_sig_additional_data(agg_sig_data: bytes) -> dict[ConditionOpcode, bytes]: + ret: dict[ConditionOpcode, bytes] = {} for code in [ ConditionOpcode.AGG_SIG_PARENT, ConditionOpcode.AGG_SIG_PUZZLE, @@ -76,7 +76,7 @@ def make_aggsig_final_message( opcode: ConditionOpcode, msg: bytes, spend_conditions: Union[Coin, SpendConditions], - agg_sig_additional_data: Dict[ConditionOpcode, bytes], + agg_sig_additional_data: dict[ConditionOpcode, bytes], ) -> bytes: if isinstance(spend_conditions, Coin): coin = spend_conditions @@ -85,7 +85,7 @@ def make_aggsig_final_message( else: raise ValueError(f"Expected Coin or Spend, got {type(spend_conditions)}") # pragma: no cover - COIN_TO_ADDENDUM_F_LOOKUP: Dict[ConditionOpcode, Callable[[Coin], bytes]] = { + COIN_TO_ADDENDUM_F_LOOKUP: dict[ConditionOpcode, Callable[[Coin], bytes]] = { ConditionOpcode.AGG_SIG_PARENT: lambda coin: coin.parent_coin_info, ConditionOpcode.AGG_SIG_PUZZLE: lambda coin: coin.puzzle_hash, ConditionOpcode.AGG_SIG_AMOUNT: lambda coin: int_to_bytes(coin.amount), @@ -98,8 +98,8 @@ def make_aggsig_final_message( return msg + addendum + agg_sig_additional_data[opcode] -def pkm_pairs(conditions: SpendBundleConditions, additional_data: bytes) -> Tuple[List[G1Element], List[bytes]]: - ret: Tuple[List[G1Element], List[bytes]] = ([], []) +def pkm_pairs(conditions: SpendBundleConditions, additional_data: bytes) -> tuple[list[G1Element], list[bytes]]: + ret: tuple[list[G1Element], list[bytes]] = ([], []) data = agg_sig_additional_data(additional_data) @@ -137,11 +137,11 @@ def validate_cwa(cwa: ConditionWithArgs) -> None: def pkm_pairs_for_conditions_dict( - conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]], + conditions_dict: dict[ConditionOpcode, list[ConditionWithArgs]], coin: Coin, additional_data: bytes, -) -> List[Tuple[G1Element, bytes]]: - ret: List[Tuple[G1Element, bytes]] = [] +) -> list[tuple[G1Element, bytes]]: + ret: list[tuple[G1Element, bytes]] = [] data = agg_sig_additional_data(additional_data) @@ -169,9 +169,9 @@ def pkm_pairs_for_conditions_dict( def created_outputs_for_conditions_dict( - conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]], + conditions_dict: dict[ConditionOpcode, list[ConditionWithArgs]], input_coin_name: bytes32, -) -> List[Coin]: +) -> list[Coin]: output_coins = [] for cvp in conditions_dict.get(ConditionOpcode.CREATE_COIN, []): puzzle_hash, amount_bin = cvp.vars[0], cvp.vars[1] @@ -183,8 +183,8 @@ def created_outputs_for_conditions_dict( def conditions_dict_for_solution( puzzle_reveal: Union[Program, SerializedProgram], solution: Union[Program, SerializedProgram], max_cost: int -) -> Dict[ConditionOpcode, List[ConditionWithArgs]]: - conditions_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {} +) -> dict[ConditionOpcode, list[ConditionWithArgs]]: + conditions_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {} for cvp in conditions_for_solution(puzzle_reveal, solution, max_cost): conditions_dict.setdefault(cvp.opcode, list()).append(cvp) return conditions_dict @@ -192,7 +192,7 @@ def conditions_dict_for_solution( def conditions_for_solution( puzzle_reveal: Union[Program, SerializedProgram], solution: Union[Program, SerializedProgram], max_cost: int -) -> List[ConditionWithArgs]: +) -> list[ConditionWithArgs]: # get the standard script for a 
puzzle hash and feed in the solution try: cost, r = puzzle_reveal.run_with_cost(max_cost, solution) diff --git a/chia/util/config.py b/chia/util/config.py index b8caed149856..60bae857ef26 100644 --- a/chia/util/config.py +++ b/chia/util/config.py @@ -10,8 +10,9 @@ import tempfile import time import traceback +from collections.abc import Iterator from pathlib import Path -from typing import Any, Callable, Dict, Iterator, List, Optional, Set, Union, cast +from typing import Any, Callable, Optional, Union, cast import importlib_resources import yaml @@ -30,7 +31,7 @@ def initial_config_file(filename: Union[str, Path]) -> str: return contents -def create_default_chia_config(root_path: Path, filenames: List[str] = ["config.yaml"]) -> None: +def create_default_chia_config(root_path: Path, filenames: list[str] = ["config.yaml"]) -> None: for filename in filenames: default_config_file_data: str = initial_config_file(filename) path: Path = config_path_for_filename(root_path, filename) @@ -66,7 +67,7 @@ def lock_and_load_config( root_path: Path, filename: Union[str, Path], fill_missing_services: bool = False, -) -> Iterator[Dict[str, Any]]: +) -> Iterator[dict[str, Any]]: with lock_config(root_path=root_path, filename=filename): config = _load_config_maybe_locked( root_path=root_path, @@ -96,7 +97,7 @@ def load_config( sub_config: Optional[str] = None, exit_on_error: bool = True, fill_missing_services: bool = False, -) -> Dict[str, Any]: +) -> dict[str, Any]: return _load_config_maybe_locked( root_path=root_path, filename=filename, @@ -114,7 +115,7 @@ def _load_config_maybe_locked( exit_on_error: bool = True, acquire_lock: bool = True, fill_missing_services: bool = False, -) -> Dict[str, Any]: +) -> dict[str, Any]: # This must be called under an acquired config lock, or acquire_lock should be True path = config_path_for_filename(root_path, filename) @@ -130,7 +131,7 @@ def _load_config_maybe_locked( for i in range(10): try: # at least we intend it to be this type - r: Dict[str, Any] + r: dict[str, Any] with contextlib.ExitStack() as exit_stack: if acquire_lock: exit_stack.enter_context(lock_config(root_path, filename)) @@ -143,7 +144,7 @@ def _load_config_maybe_locked( if fill_missing_services: r.update(load_defaults_for_missing_services(config=r, config_name=path.name)) if sub_config is not None: - r = cast(Dict[str, Any], r.get(sub_config)) + r = cast(dict[str, Any], r.get(sub_config)) return r except Exception as e: tb = traceback.format_exc() @@ -157,7 +158,7 @@ def load_config_cli( filename: str, sub_config: Optional[str] = None, fill_missing_services: bool = False, -) -> Dict[str, Any]: +) -> dict[str, Any]: """ Loads configuration from the specified filename, in the config directory, and then overrides any properties using the passed in command line arguments. @@ -182,7 +183,7 @@ def load_config_cli( return unflatten_properties(flattened_props) -def flatten_properties(config: Dict[str, Any]) -> Dict[str, Any]: +def flatten_properties(config: dict[str, Any]) -> dict[str, Any]: properties = {} for key, value in config.items(): if type(value) is dict: @@ -193,8 +194,8 @@ def flatten_properties(config: Dict[str, Any]) -> Dict[str, Any]: return properties -def unflatten_properties(config: Dict[str, Any]) -> Dict[str, Any]: - properties: Dict[str, Any] = {} +def unflatten_properties(config: dict[str, Any]) -> dict[str, Any]: + properties: dict[str, Any] = {} for key, value in config.items(): if "." 
in key: add_property(properties, key, value) @@ -203,7 +204,7 @@ def unflatten_properties(config: Dict[str, Any]) -> Dict[str, Any]: return properties -def add_property(d: Dict[str, Any], partial_key: str, value: Any) -> None: +def add_property(d: dict[str, Any], partial_key: str, value: Any) -> None: if "." not in partial_key: # root of dict d[partial_key] = value else: @@ -228,7 +229,7 @@ def str2bool(v: Union[str, bool]) -> bool: raise argparse.ArgumentTypeError("Boolean value expected.") -def traverse_dict(d: Dict[str, Any], key_path: str) -> Any: +def traverse_dict(d: dict[str, Any], key_path: str) -> Any: """ Traverse nested dictionaries to find the element pointed-to by key_path. Key path components are separated by a ':' e.g. @@ -256,7 +257,7 @@ def traverse_dict(d: Dict[str, Any], key_path: str) -> Any: method_strings = Literal["default", "python_default", "fork", "forkserver", "spawn"] method_values = Optional[Literal["fork", "forkserver", "spawn"]] -start_methods: Dict[method_strings, method_values] = { +start_methods: dict[method_strings, method_values] = { "default": None, "python_default": None, "fork": "fork", @@ -266,7 +267,7 @@ def traverse_dict(d: Dict[str, Any], key_path: str) -> Any: def process_config_start_method( - config: Dict[str, Any], + config: dict[str, Any], log: logging.Logger, ) -> method_values: from_config: object = config.get("multiprocessing_start_method") @@ -290,7 +291,7 @@ def process_config_start_method( return processed_method -def override_config(config: Dict[str, Any], config_overrides: Optional[Dict[str, Any]]) -> Dict[str, Any]: +def override_config(config: dict[str, Any], config_overrides: Optional[dict[str, Any]]) -> dict[str, Any]: new_config = copy.deepcopy(config) if config_overrides is None: return new_config @@ -299,13 +300,13 @@ def override_config(config: Dict[str, Any], config_overrides: Optional[Dict[str, return new_config -def selected_network_address_prefix(config: Dict[str, Any]) -> str: +def selected_network_address_prefix(config: dict[str, Any]) -> str: # we intend this to be a str at least address_prefix: str = config["network_overrides"]["config"][config["selected_network"]]["address_prefix"] return address_prefix -def load_defaults_for_missing_services(config: Dict[str, Any], config_name: str) -> Dict[str, Any]: +def load_defaults_for_missing_services(config: dict[str, Any], config_name: str) -> dict[str, Any]: services = ["data_layer"] missing_services = [service for service in services if service not in config] defaulted = {} @@ -331,16 +332,16 @@ def load_defaults_for_missing_services(config: Dict[str, Any], config_name: str) return defaulted -PEER_INFO_MAPPING: Dict[NodeType, str] = { +PEER_INFO_MAPPING: dict[NodeType, str] = { NodeType.FULL_NODE: "full_node_peer", NodeType.FARMER: "farmer_peer", } -def get_unresolved_peer_infos(service_config: Dict[str, Any], peer_type: NodeType) -> Set[UnresolvedPeerInfo]: +def get_unresolved_peer_infos(service_config: dict[str, Any], peer_type: NodeType) -> set[UnresolvedPeerInfo]: peer_info_key = PEER_INFO_MAPPING[peer_type] - peer_infos: List[Dict[str, Any]] = service_config.get(f"{peer_info_key}s", []) - peer_info: Optional[Dict[str, Any]] = service_config.get(peer_info_key) + peer_infos: list[dict[str, Any]] = service_config.get(f"{peer_info_key}s", []) + peer_info: Optional[dict[str, Any]] = service_config.get(peer_info_key) if peer_info is not None: peer_infos.append(peer_info) @@ -348,7 +349,7 @@ def get_unresolved_peer_infos(service_config: Dict[str, Any], peer_type: NodeTyp def 
set_peer_info( - service_config: Dict[str, Any], + service_config: dict[str, Any], peer_type: NodeType, peer_host: Optional[str] = None, peer_port: Optional[int] = None, diff --git a/chia/util/db_wrapper.py b/chia/util/db_wrapper.py index 6bae4161c5df..d6592162297c 100644 --- a/chia/util/db_wrapper.py +++ b/chia/util/db_wrapper.py @@ -6,10 +6,11 @@ import secrets import sqlite3 import sys +from collections.abc import AsyncIterator, Iterable from dataclasses import dataclass, field from datetime import datetime from pathlib import Path -from typing import Any, AsyncIterator, Dict, Iterable, List, Optional, TextIO, Tuple, Type, Union +from typing import Any, Optional, TextIO, Union import aiosqlite import anyio @@ -29,8 +30,8 @@ class DBWrapperError(Exception): class ForeignKeyError(DBWrapperError): - def __init__(self, violations: Iterable[Union[aiosqlite.Row, Tuple[str, object, str, object]]]) -> None: - self.violations: List[Dict[str, object]] = [] + def __init__(self, violations: Iterable[Union[aiosqlite.Row, tuple[str, object, str, object]]]) -> None: + self.violations: list[dict[str, object]] = [] for violation in violations: if isinstance(violation, tuple): @@ -136,7 +137,7 @@ class DBWrapper2: _lock: asyncio.Lock = field(default_factory=asyncio.Lock) _read_connections: asyncio.Queue[aiosqlite.Connection] = field(default_factory=asyncio.Queue) _num_read_connections: int = 0 - _in_use: Dict[asyncio.Task[object], aiosqlite.Connection] = field(default_factory=dict) + _in_use: dict[asyncio.Task[object], aiosqlite.Connection] = field(default_factory=dict) _current_writer: Optional[asyncio.Task[object]] = None _savepoint_name: int = 0 @@ -160,7 +161,7 @@ async def managed( journal_mode: str = "WAL", synchronous: Optional[str] = None, foreign_keys: Optional[bool] = None, - row_factory: Optional[Type[aiosqlite.Row]] = None, + row_factory: Optional[type[aiosqlite.Row]] = None, ) -> AsyncIterator[DBWrapper2]: if foreign_keys is None: foreign_keys = False @@ -217,7 +218,7 @@ async def create( journal_mode: str = "WAL", synchronous: Optional[str] = None, foreign_keys: bool = False, - row_factory: Optional[Type[aiosqlite.Row]] = None, + row_factory: Optional[type[aiosqlite.Row]] = None, ) -> DBWrapper2: # WARNING: please use .managed() instead if log_path is None: diff --git a/chia/util/dump_keyring.py b/chia/util/dump_keyring.py index 87efd668f111..e4c9b2a3004d 100755 --- a/chia/util/dump_keyring.py +++ b/chia/util/dump_keyring.py @@ -5,7 +5,7 @@ import os from io import TextIOWrapper from pathlib import Path -from typing import Any, Dict, Optional, cast +from typing import Any, Optional, cast import click import colorama @@ -66,7 +66,7 @@ def dump(keyring_file: str, full_payload: bool, passphrase_file: Optional[TextIO try: data_dict = file_content.get_decrypted_data_dict(passphrase) if full_payload: - dump_content: Dict[str, Any] = file_content_dict + dump_content: dict[str, Any] = file_content_dict dump_content["data"] = data_dict else: dump_content = data_dict diff --git a/chia/util/errors.py b/chia/util/errors.py index 1061e495866e..87defc2ef15c 100644 --- a/chia/util/errors.py +++ b/chia/util/errors.py @@ -2,7 +2,7 @@ from enum import Enum from pathlib import Path -from typing import Any, List, Optional +from typing import Any, Optional from click import ClickException @@ -208,14 +208,14 @@ def __init__(self) -> None: class ConsensusError(Exception): - def __init__(self, code: Err, errors: List[Any] = []): + def __init__(self, code: Err, errors: list[Any] = []): super().__init__(f"Error 
code: {code.name} {errors}") self.code = code self.errors = errors class ProtocolError(Exception): - def __init__(self, code: Err, errors: List[Any] = []): + def __init__(self, code: Err, errors: list[Any] = []): super().__init__(f"Error code: {code.name} {errors}") self.code = code self.errors = errors diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py index 5a7d8c2f98f8..efdaa69c8b8c 100644 --- a/chia/util/file_keyring.py +++ b/chia/util/file_keyring.py @@ -6,11 +6,12 @@ import shutil import sys import threading +from collections.abc import Iterator from dataclasses import asdict, dataclass, field from hashlib import pbkdf2_hmac from pathlib import Path from secrets import token_bytes -from typing import Any, Dict, Iterator, Optional, Union, cast +from typing import Any, Optional, Union, cast import yaml from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305 # pyright: reportMissingModuleSource=false @@ -144,7 +145,7 @@ def write_to_path(self, path: Path) -> None: except PermissionError: shutil.move(str(temp_path), str(path)) - def get_decrypted_data_dict(self, passphrase: str) -> Dict[str, Any]: + def get_decrypted_data_dict(self, passphrase: str) -> dict[str, Any]: if self.empty(): return {} key = symmetric_key_from_passphrase(passphrase, self.salt) @@ -165,7 +166,7 @@ def update_encrypted_data_dict( def empty(self) -> bool: return self.data is None or len(self.data) == 0 - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: result = asdict(self) result["salt"] = result["salt"].hex() result["nonce"] = result["nonce"].hex() @@ -175,30 +176,30 @@ def to_dict(self) -> Dict[str, Any]: @dataclass(frozen=True) class Key: secret: bytes - metadata: Optional[Dict[str, Any]] = None + metadata: Optional[dict[str, Any]] = None @classmethod - def parse(cls, data: str, metadata: Optional[Dict[str, Any]]) -> Key: + def parse(cls, data: str, metadata: Optional[dict[str, Any]]) -> Key: return cls( bytes.fromhex(data), metadata, ) - def to_data(self) -> Union[str, Dict[str, Any]]: + def to_data(self) -> Union[str, dict[str, Any]]: return self.secret.hex() -Users = Dict[str, Key] -Services = Dict[str, Users] +Users = dict[str, Key] +Services = dict[str, Users] @dataclass class DecryptedKeyringData: services: Services - labels: Dict[int, str] # {fingerprint: label} + labels: dict[int, str] # {fingerprint: label} @classmethod - def from_dict(cls, data_dict: Dict[str, Any]) -> DecryptedKeyringData: + def from_dict(cls, data_dict: dict[str, Any]) -> DecryptedKeyringData: return cls( { service: { @@ -210,7 +211,7 @@ def from_dict(cls, data_dict: Dict[str, Any]) -> DecryptedKeyringData: data_dict.get("labels", {}), ) - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: return { "keys": { service: {user: key.to_data() for user, key in users.items()} @@ -243,7 +244,7 @@ class FileKeyring(FileSystemEventHandler): cached_data_dict: DecryptedKeyringData = field(default_factory=default_file_keyring_data) keyring_last_mod_time: Optional[float] = None # Key/value pairs to set on the outer payload on the next write - file_content_properties_for_next_write: Dict[str, Any] = field(default_factory=dict) + file_content_properties_for_next_write: dict[str, Any] = field(default_factory=dict) @classmethod def create(cls, keys_root_path: Path = DEFAULT_KEYS_ROOT_PATH) -> FileKeyring: @@ -320,7 +321,7 @@ def cached_keys(self) -> Services: """ return self.cached_data_dict.services - def cached_labels(self) -> Dict[int, str]: + def 
cached_labels(self) -> dict[int, str]: """ Returns keyring.data.labels """ diff --git a/chia/util/full_block_utils.py b/chia/util/full_block_utils.py index 2dbb31a703cd..04a4e9f3f1c2 100644 --- a/chia/util/full_block_utils.py +++ b/chia/util/full_block_utils.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Callable, List, Optional, Tuple +from typing import Callable, Optional from chia_rs import G1Element, G2Element, serialized_length from chiabip158 import PyBIP158 @@ -167,7 +167,7 @@ def skip_foliage(buf: memoryview) -> memoryview: return skip_optional(buf, skip_g2_element) # foliage_transaction_block_signature -def prev_hash_from_foliage(buf: memoryview) -> Tuple[memoryview, bytes32]: +def prev_hash_from_foliage(buf: memoryview) -> tuple[memoryview, bytes32]: prev_hash = buf[:32] # prev_block_hash buf = skip_bytes32(buf) # prev_block_hash buf = skip_bytes32(buf) # reward_block_hash @@ -230,7 +230,7 @@ def generator_from_block(buf: memoryview) -> Optional[bytes]: class GeneratorBlockInfo: prev_header_hash: bytes32 transactions_generator: Optional[SerializedProgram] - transactions_generator_ref_list: List[uint32] + transactions_generator_ref_list: list[uint32] def block_info_from_block(buf: memoryview) -> GeneratorBlockInfo: @@ -267,7 +267,7 @@ def block_info_from_block(buf: memoryview) -> GeneratorBlockInfo: def header_block_from_block( - buf: memoryview, request_filter: bool = True, tx_addition_coins: List[Coin] = [], removal_names: List[bytes32] = [] + buf: memoryview, request_filter: bool = True, tx_addition_coins: list[Coin] = [], removal_names: list[bytes32] = [] ) -> bytes: buf2 = buf[:] buf2 = skip_list(buf2, skip_end_of_sub_slot_bundle) # finished_sub_slots @@ -298,7 +298,7 @@ def header_block_from_block( else: transactions_info_optional = bytes([1]) transactions_info, advance = TransactionsInfo.parse_rust(buf2[1:]) - byte_array_tx: List[bytearray] = [] + byte_array_tx: list[bytearray] = [] if is_transaction_block and transactions_info: addition_coins = tx_addition_coins + list(transactions_info.reward_claims_incorporated) for coin in addition_coins: diff --git a/chia/util/generator_tools.py b/chia/util/generator_tools.py index 25790d8cd308..8ee76f2b84d9 100644 --- a/chia/util/generator_tools.py +++ b/chia/util/generator_tools.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Collection, List, Optional, Tuple +from collections.abc import Collection +from typing import Optional from chiabip158 import PyBIP158 @@ -16,7 +17,7 @@ def get_block_header( block: FullBlock, tx_addition_coins: Collection[Coin], removals_names: Collection[bytes32] ) -> HeaderBlock: # Create filter - byte_array_tx: List[bytearray] = [] + byte_array_tx: list[bytearray] = [] if block.is_transaction_block(): for coin in tx_addition_coins: byte_array_tx.append(bytearray(coin.puzzle_hash)) @@ -43,13 +44,13 @@ def get_block_header( ) -def tx_removals_and_additions(results: Optional[SpendBundleConditions]) -> Tuple[List[bytes32], List[Coin]]: +def tx_removals_and_additions(results: Optional[SpendBundleConditions]) -> tuple[list[bytes32], list[Coin]]: """ Doesn't return farmer and pool reward. 
""" - removals: List[bytes32] = [] - additions: List[Coin] = [] + removals: list[bytes32] = [] + additions: list[Coin] = [] # build removals list if results is None: diff --git a/chia/util/keychain.py b/chia/util/keychain.py index ca4004a99663..a72321b2b900 100644 --- a/chia/util/keychain.py +++ b/chia/util/keychain.py @@ -2,10 +2,11 @@ import sys import unicodedata +from collections.abc import Iterator from dataclasses import dataclass from hashlib import pbkdf2_hmac from pathlib import Path -from typing import Any, Dict, Iterator, List, Literal, Optional, Tuple, Union, overload +from typing import Any, Literal, Optional, Union, overload import importlib_resources from bitstring import BitArray # pyright: reportMissingImports=false @@ -41,7 +42,7 @@ def supports_os_passphrase_storage() -> bool: return sys.platform in ["darwin", "win32", "cygwin"] -def passphrase_requirements() -> Dict[str, Any]: +def passphrase_requirements() -> dict[str, Any]: """ Returns a dictionary specifying current passphrase requirements """ @@ -93,7 +94,7 @@ def bytes_to_mnemonic(mnemonic_bytes: bytes) -> str: def check_mnemonic_validity(mnemonic_str: str) -> bool: - mnemonic: List[str] = mnemonic_str.split(" ") + mnemonic: list[str] = mnemonic_str.split(" ") return len(mnemonic) in [12, 15, 18, 21, 24] @@ -103,12 +104,12 @@ def mnemonic_from_short_words(mnemonic_str: str) -> str: practice to only store the first 4 letters of each word in many offline storage solutions, also support looking up words by the first 4 characters """ - mnemonic: List[str] = mnemonic_str.split(" ") + mnemonic: list[str] = mnemonic_str.split(" ") if len(mnemonic) not in [12, 15, 18, 21, 24]: raise ValueError("Invalid mnemonic length") four_char_dict = {word[:4]: word for word in bip39_word_list().splitlines()} - full_words: List[str] = [] + full_words: list[str] = [] for word in mnemonic: full_word = four_char_dict.get(word[:4]) if full_word is None: @@ -120,7 +121,7 @@ def mnemonic_from_short_words(mnemonic_str: str) -> str: def bytes_from_mnemonic(mnemonic_str: str) -> bytes: full_mnemonic_str = mnemonic_from_short_words(mnemonic_str) - mnemonic: List[str] = full_mnemonic_str.split(" ") + mnemonic: list[str] = full_mnemonic_str.split(" ") word_list = {word: i for i, word in enumerate(bip39_word_list().splitlines())} bit_array = BitArray() @@ -185,7 +186,7 @@ def get_private_key_user(user: str, index: int) -> str: @streamable @dataclass(frozen=True) class KeyDataSecrets(Streamable): - mnemonic: List[str] + mnemonic: list[str] entropy: bytes private_key: PrivateKey @@ -259,7 +260,7 @@ def generate(cls, label: Optional[str] = None) -> KeyData: return cls.from_mnemonic(generate_mnemonic(), label) @property - def mnemonic(self) -> List[str]: + def mnemonic(self) -> list[str]: if self.secrets is None: raise KeychainSecretsMissing() return self.secrets.mnemonic @@ -434,7 +435,7 @@ def _iterate_through_key_datas( pass return None - def get_first_private_key(self) -> Optional[Tuple[PrivateKey, bytes]]: + def get_first_private_key(self) -> Optional[tuple[PrivateKey, bytes]]: """ Returns the first key in the keychain that has one of the passed in passphrases. 
""" @@ -442,7 +443,7 @@ def get_first_private_key(self) -> Optional[Tuple[PrivateKey, bytes]]: return key_data.private_key, key_data.entropy return None - def get_private_key_by_fingerprint(self, fingerprint: int) -> Optional[Tuple[PrivateKey, bytes]]: + def get_private_key_by_fingerprint(self, fingerprint: int) -> Optional[tuple[PrivateKey, bytes]]: """ Return first private key which have the given public key fingerprint. """ @@ -451,12 +452,12 @@ def get_private_key_by_fingerprint(self, fingerprint: int) -> Optional[Tuple[Pri return key_data.private_key, key_data.entropy return None - def get_all_private_keys(self) -> List[Tuple[PrivateKey, bytes]]: + def get_all_private_keys(self) -> list[tuple[PrivateKey, bytes]]: """ Returns all private keys which can be retrieved, with the given passphrases. A tuple of key, and entropy bytes (i.e. mnemonic) is returned for each key. """ - all_keys: List[Tuple[PrivateKey, bytes]] = [] + all_keys: list[tuple[PrivateKey, bytes]] = [] for key_data in self._iterate_through_key_datas(skip_public_only=True): all_keys.append((key_data.private_key, key_data.entropy)) return all_keys @@ -470,21 +471,21 @@ def get_key(self, fingerprint: int, include_secrets: bool = False) -> KeyData: return key_data raise KeychainFingerprintNotFound(fingerprint) - def get_keys(self, include_secrets: bool = False) -> List[KeyData]: + def get_keys(self, include_secrets: bool = False) -> list[KeyData]: """ Returns the KeyData of all keys which can be retrieved. """ - all_keys: List[KeyData] = [] + all_keys: list[KeyData] = [] for key_data in self._iterate_through_key_datas(include_secrets=include_secrets, skip_public_only=False): all_keys.append(key_data) return all_keys - def get_all_public_keys(self) -> List[G1Element]: + def get_all_public_keys(self) -> list[G1Element]: """ Returns all public keys. """ - all_keys: List[G1Element] = [] + all_keys: list[G1Element] = [] for key_data in self._iterate_through_key_datas(skip_public_only=False): all_keys.append(key_data.public_key) @@ -521,7 +522,7 @@ def delete_key_by_fingerprint(self, fingerprint: int) -> int: pass return removed - def delete_keys(self, keys_to_delete: List[Tuple[PrivateKey, bytes]]) -> None: + def delete_keys(self, keys_to_delete: list[tuple[PrivateKey, bytes]]) -> None: """ Deletes all keys in the list. """ diff --git a/chia/util/keyring_wrapper.py b/chia/util/keyring_wrapper.py index 7187811daa4b..b65e90523bc4 100644 --- a/chia/util/keyring_wrapper.py +++ b/chia/util/keyring_wrapper.py @@ -2,7 +2,7 @@ from pathlib import Path from sys import platform -from typing import ClassVar, Optional, Tuple, Union, overload +from typing import ClassVar, Optional, Union, overload from keyring.backends.macOS import Keyring as MacKeyring from keyring.backends.Windows import WinVaultKeyring as WinKeyring @@ -141,7 +141,7 @@ def get_keyring(self) -> FileKeyring: # Master passphrase support - def get_cached_master_passphrase(self) -> Tuple[Optional[str], bool]: + def get_cached_master_passphrase(self) -> tuple[Optional[str], bool]: """ Returns a tuple including the currently cached passphrase and a bool indicating whether the passphrase has been previously validated. 
diff --git a/chia/util/limited_semaphore.py b/chia/util/limited_semaphore.py index 9393a09d5302..955b58561fd3 100644 --- a/chia/util/limited_semaphore.py +++ b/chia/util/limited_semaphore.py @@ -2,8 +2,8 @@ import asyncio import contextlib +from collections.abc import AsyncIterator from dataclasses import dataclass -from typing import AsyncIterator from typing_extensions import final diff --git a/chia/util/lock.py b/chia/util/lock.py index 6aaecaa5b423..53ed8dcfea28 100644 --- a/chia/util/lock.py +++ b/chia/util/lock.py @@ -3,7 +3,7 @@ from dataclasses import dataclass from pathlib import Path from types import TracebackType -from typing import Optional, Type +from typing import Optional from filelock import BaseFileLock, FileLock, Timeout from typing_extensions import final @@ -31,7 +31,7 @@ def __enter__(self) -> Lockfile: def __exit__( self, - exc_type: Optional[Type[BaseException]], + exc_type: Optional[type[BaseException]], exc: Optional[BaseException], traceback: Optional[TracebackType], ) -> None: diff --git a/chia/util/log_exceptions.py b/chia/util/log_exceptions.py index f83b618c9bba..27d6802fbbab 100644 --- a/chia/util/log_exceptions.py +++ b/chia/util/log_exceptions.py @@ -2,8 +2,9 @@ import logging import traceback +from collections.abc import Iterator from contextlib import contextmanager -from typing import Iterator, Tuple, Type, Union +from typing import Union @contextmanager @@ -14,7 +15,7 @@ def log_exceptions( message: str = "Caught exception", level: int = logging.ERROR, show_traceback: bool = True, - exceptions_to_process: Union[Type[BaseException], Tuple[Type[BaseException], ...]] = Exception, + exceptions_to_process: Union[type[BaseException], tuple[type[BaseException], ...]] = Exception, ) -> Iterator[None]: try: yield diff --git a/chia/util/logging.py b/chia/util/logging.py index 0e3ea967c491..07f6386322a9 100644 --- a/chia/util/logging.py +++ b/chia/util/logging.py @@ -3,7 +3,7 @@ import logging import re import time -from typing import Pattern +from re import Pattern class TimedDuplicateFilter(logging.Filter): diff --git a/chia/util/math.py b/chia/util/math.py index d2590ec31da5..670818390537 100644 --- a/chia/util/math.py +++ b/chia/util/math.py @@ -1,14 +1,12 @@ from __future__ import annotations -from typing import List - def clamp(n: int, smallest: int, largest: int) -> int: return max(smallest, min(n, largest)) -def make_monotonically_decreasing(seq: List[float]) -> List[float]: - out: List[float] = [] +def make_monotonically_decreasing(seq: list[float]) -> list[float]: + out: list[float] = [] if len(seq) > 0: min = seq[0] for n in seq: diff --git a/chia/util/network.py b/chia/util/network.py index 2c74b45df1f0..cd3519223999 100644 --- a/chia/util/network.py +++ b/chia/util/network.py @@ -5,9 +5,10 @@ import logging import socket import ssl +from collections.abc import Iterable from dataclasses import dataclass from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network, ip_address -from typing import Any, Dict, Iterable, List, Literal, Optional, Tuple, Union +from typing import Any, Literal, Optional, Union from aiohttp import web from aiohttp.log import web_logger @@ -146,7 +147,7 @@ def is_in_network(peer_host: str, networks: Iterable[Union[IPv4Network, IPv6Netw return False -def is_trusted_cidr(peer_host: str, trusted_cidrs: List[str]) -> bool: +def is_trusted_cidr(peer_host: str, trusted_cidrs: list[str]) -> bool: try: ip_obj = ipaddress.ip_address(peer_host) except ValueError: @@ -165,7 +166,7 @@ def is_localhost(peer_host: str) -> bool: 
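is_trusted_cidr above reduces to a membership test over parsed networks; ipaddress treats a v4 address as not contained in a v6 network (and vice versa), so mixed lists are safe. A standalone sketch with illustrative addresses:

import ipaddress

trusted_cidrs = ["192.168.0.0/16", "2001:db8::/32"]
networks = [ipaddress.ip_network(cidr) for cidr in trusted_cidrs]

assert any(ipaddress.ip_address("192.168.1.7") in net for net in networks)
assert not any(ipaddress.ip_address("8.8.8.8") in net for net in networks)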
def is_trusted_peer( - host: str, node_id: bytes32, trusted_peers: Dict[str, Any], trusted_cidrs: List[str], testing: bool = False + host: str, node_id: bytes32, trusted_peers: dict[str, Any], trusted_cidrs: list[str], testing: bool = False ) -> bool: return not testing and is_localhost(host) or node_id.hex() in trusted_peers or is_trusted_cidr(host, trusted_cidrs) @@ -203,8 +204,8 @@ async def resolve(host: str, *, prefer_ipv6: bool = False) -> IPAddress: return IPAddress.create(host) except ValueError: pass - addrset: List[ - Tuple[socket.AddressFamily, socket.SocketKind, int, str, Union[Tuple[str, int], Tuple[str, int, int, int]]] + addrset: list[ + tuple[socket.AddressFamily, socket.SocketKind, int, str, Union[tuple[str, int], tuple[str, int, int, int]]] ] = await asyncio.get_event_loop().getaddrinfo(host, None) # The list returned by getaddrinfo is never empty, an exception is thrown or data is returned. ips_v4 = [] @@ -224,7 +225,7 @@ async def resolve(host: str, *, prefer_ipv6: bool = False) -> IPAddress: raise ValueError(f"failed to resolve {host} into an IP address") -def select_port(prefer_ipv6: bool, addresses: List[Any]) -> uint16: +def select_port(prefer_ipv6: bool, addresses: list[Any]) -> uint16: selected_port: uint16 for address_string, port, *_ in addresses: address = ip_address(address_string) diff --git a/chia/util/paginator.py b/chia/util/paginator.py index 69bcce5377f4..f673b1633bbe 100644 --- a/chia/util/paginator.py +++ b/chia/util/paginator.py @@ -1,8 +1,8 @@ from __future__ import annotations import dataclasses +from collections.abc import Sequence from math import ceil -from typing import Sequence class InvalidPageSizeLimit(Exception): diff --git a/chia/util/permissions.py b/chia/util/permissions.py index 835146cbada4..48812039e55f 100644 --- a/chia/util/permissions.py +++ b/chia/util/permissions.py @@ -2,10 +2,9 @@ import os from pathlib import Path -from typing import Tuple -def verify_file_permissions(path: Path, mask: int) -> Tuple[bool, int]: +def verify_file_permissions(path: Path, mask: int) -> tuple[bool, int]: """ Check that the file's permissions are properly restricted, as compared to the permission mask diff --git a/chia/util/pprint.py b/chia/util/pprint.py index 471ce9ce89b5..eb53fd3a747a 100644 --- a/chia/util/pprint.py +++ b/chia/util/pprint.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List @dataclass @@ -16,7 +15,7 @@ def __repr__(self) -> str: return f"{self.first} to {self.last}" -def int_list_to_ranges(array: List[int]) -> List[Range]: +def int_list_to_ranges(array: list[int]) -> list[Range]: if len(array) == 0: return [] sorted_array = sorted(array) @@ -36,5 +35,5 @@ def int_list_to_ranges(array: List[int]) -> List[Range]: return ranges -def print_compact_ranges(array: List[int]) -> str: +def print_compact_ranges(array: list[int]) -> str: return str(int_list_to_ranges(array)) diff --git a/chia/util/prev_transaction_block.py b/chia/util/prev_transaction_block.py index cc52db22e9fb..f94a16450b18 100644 --- a/chia/util/prev_transaction_block.py +++ b/chia/util/prev_transaction_block.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Tuple - from chia.consensus.block_record import BlockRecord from chia.consensus.blockchain_interface import BlockRecordsProtocol from chia.util.ints import uint128 @@ -11,7 +9,7 @@ def get_prev_transaction_block( curr: BlockRecord, blocks: BlockRecordsProtocol, total_iters_sp: uint128, -) -> Tuple[bool, BlockRecord]: +) -> tuple[bool, 
BlockRecord]: prev_transaction_block = curr while not curr.is_transaction_block: curr = blocks.block_record(curr.prev_hash) diff --git a/chia/util/priority_mutex.py b/chia/util/priority_mutex.py index fef148dbe7fb..a3db80c25645 100644 --- a/chia/util/priority_mutex.py +++ b/chia/util/priority_mutex.py @@ -5,8 +5,9 @@ import contextlib import dataclasses import logging +from collections.abc import AsyncIterator from enum import IntEnum -from typing import AsyncIterator, Dict, Generic, Optional, Type, TypeVar +from typing import Generic, Optional, TypeVar from typing_extensions import final @@ -45,11 +46,11 @@ class PriorityMutex(Generic[_T_Priority]): ``` """ - _deques: Dict[_T_Priority, collections.deque[_Element]] + _deques: dict[_T_Priority, collections.deque[_Element]] _active: Optional[_Element] = None @classmethod - def create(cls, priority_type: Type[_T_Priority]) -> PriorityMutex[_T_Priority]: + def create(cls, priority_type: type[_T_Priority]) -> PriorityMutex[_T_Priority]: return cls( _deques={priority: collections.deque() for priority in sorted(priority_type)}, ) diff --git a/chia/util/profiler.py b/chia/util/profiler.py index ba57c3fcee6a..2069e57b7aef 100644 --- a/chia/util/profiler.py +++ b/chia/util/profiler.py @@ -5,9 +5,10 @@ import logging import pathlib import tracemalloc +from collections.abc import AsyncIterator from contextlib import asynccontextmanager from datetime import datetime -from typing import AsyncIterator, Optional +from typing import Optional from chia.util.path import path_from_root diff --git a/chia/util/service_groups.py b/chia/util/service_groups.py index 08685ed96209..b5c05ddce360 100644 --- a/chia/util/service_groups.py +++ b/chia/util/service_groups.py @@ -1,8 +1,8 @@ from __future__ import annotations -from typing import Dict, Generator, Iterable, KeysView +from collections.abc import Generator, Iterable, KeysView -SERVICES_FOR_GROUP: Dict[str, list[str]] = { +SERVICES_FOR_GROUP: dict[str, list[str]] = { "all": [ "chia_harvester", "chia_timelord_launcher", diff --git a/chia/util/ssl_check.py b/chia/util/ssl_check.py index ce0170e229d1..1aecce4d120c 100644 --- a/chia/util/ssl_check.py +++ b/chia/util/ssl_check.py @@ -5,7 +5,7 @@ import sys from logging import Logger from pathlib import Path -from typing import List, Optional, Set, Tuple +from typing import Optional from chia.util.config import load_config, traverse_dict from chia.util.permissions import octal_mode_string, verify_file_permissions @@ -60,15 +60,15 @@ # Set to keep track of which files we've already warned about -warned_ssl_files: Set[Path] = set() +warned_ssl_files: set[Path] = set() -def get_all_ssl_file_paths(root_path: Path) -> Tuple[List[Path], List[Path]]: +def get_all_ssl_file_paths(root_path: Path) -> tuple[list[Path], list[Path]]: """Lookup config values and append to a list of files whose permissions we need to check""" from chia.ssl.create_ssl import get_mozilla_ca_crt - all_certs: List[Path] = [] - all_keys: List[Path] = [] + all_certs: list[Path] = [] + all_keys: list[Path] = [] try: config = load_config(root_path, "config.yaml", exit_on_error=False, fill_missing_services=True) @@ -99,16 +99,16 @@ def get_ssl_perm_warning(path: Path, actual_mode: int, expected_mode: int) -> st def verify_ssl_certs_and_keys( - cert_paths: List[Path], key_paths: List[Path], log: Optional[Logger] = None -) -> List[Tuple[Path, int, int]]: + cert_paths: list[Path], key_paths: list[Path], log: Optional[Logger] = None +) -> list[tuple[Path, int, int]]: """Check that file permissions are properly 
set for the provided SSL cert and key files""" if sys.platform == "win32" or sys.platform == "cygwin": # TODO: ACLs for SSL certs/keys on Windows return [] - invalid_files_and_modes: List[Tuple[Path, int, int]] = [] + invalid_files_and_modes: list[tuple[Path, int, int]] = [] - def verify_paths(paths: List[Path], restrict_mask: int, expected_permissions: int) -> None: + def verify_paths(paths: list[Path], restrict_mask: int, expected_permissions: int) -> None: nonlocal invalid_files_and_modes for path in paths: try: @@ -157,7 +157,7 @@ def check_ssl(root_path: Path) -> None: print("\n".join(lines), file=sys.stderr) -def check_and_fix_permissions_for_ssl_file(file: Path, mask: int, updated_mode: int) -> Tuple[bool, bool]: +def check_and_fix_permissions_for_ssl_file(file: Path, mask: int, updated_mode: int) -> tuple[bool, bool]: """Check file permissions and attempt to fix them if found to be too open""" if sys.platform == "win32" or sys.platform == "cygwin": # TODO: ACLs for SSL certs/keys on Windows diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 53786bf08189..3589ef03fe0e 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -5,23 +5,9 @@ import os import pprint import traceback +from collections.abc import Collection from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - BinaryIO, - Callable, - ClassVar, - Collection, - Dict, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, - get_type_hints, -) +from typing import TYPE_CHECKING, Any, BinaryIO, Callable, ClassVar, Optional, TypeVar, Union, get_type_hints from typing_extensions import Literal, get_args, get_origin @@ -45,7 +31,7 @@ class UnsupportedType(StreamableError): class DefinitionError(StreamableError): - def __init__(self, message: str, cls: Type[object]): + def __init__(self, message: str, cls: type[object]): super().__init__( f"{message} Correct usage is:\n\n" f"@streamable\n@dataclass(frozen=True)\nclass {cls.__name__}(Streamable):\n ..." @@ -53,7 +39,7 @@ def __init__(self, message: str, cls: Type[object]): class ParameterMissingError(StreamableError): - def __init__(self, cls: type, missing: List[str]): + def __init__(self, cls: type, missing: list[str]): super().__init__( f"{len(missing)} field{'s' if len(missing) != 1 else ''} missing for {cls.__name__}: {', '.join(missing)}" ) @@ -91,7 +77,7 @@ def __init__(self, value: object, to_type: type, exception: Exception): @dataclasses.dataclass(frozen=True) class Field: name: str - type: Type[object] + type: type[object] has_default: bool stream_function: StreamFunctionType parse_function: ParseFunctionType @@ -99,10 +85,10 @@ class Field: post_init_function: ConvertFunctionType -StreamableFields = Tuple[Field, ...] +StreamableFields = tuple[Field, ...] 
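[Editor's note] The hunks above and below all apply the same mechanical migration: the typing aliases List, Dict, Tuple, Set, and Type become the builtin generics list, dict, tuple, set, and type (PEP 585), and abstract container types such as Sequence, Collection, and Iterator are imported from collections.abc rather than typing. A minimal sketch of the pattern follows; it is illustrative only, not part of the patch, and the function name is hypothetical. Annotation positions stay compatible with older interpreters via `from __future__ import annotations` (which these files already carry), but runtime subscriptions, like the `StreamableFields = tuple[Field, ...]` alias just above, require Python 3.9 or newer.

    # Illustrative sketch, not part of the patch.
    from __future__ import annotations

    from collections.abc import Iterator  # was: from typing import Iterator


    def pairwise_sums(values: list[int]) -> Iterator[int]:  # was: List[int]
        # The __future__ import defers evaluation of these annotations;
        # runtime uses such as `Alias = tuple[int, ...]` still need >= 3.9.
        for left, right in zip(values, values[1:]):
            yield left + right
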
-def create_fields(cls: Type[DataclassInstance]) -> StreamableFields: +def create_fields(cls: type[DataclassInstance]) -> StreamableFields: hints = get_type_hints(cls) fields = [] for field in dataclasses.fields(cls): @@ -144,7 +130,7 @@ def convert_optional(convert_func: ConvertFunctionType, item: Any) -> Any: return convert_func(item) -def convert_tuple(convert_funcs: List[ConvertFunctionType], items: Collection[Any]) -> Tuple[Any, ...]: +def convert_tuple(convert_funcs: list[ConvertFunctionType], items: Collection[Any]) -> tuple[Any, ...]: if not isinstance(items, (list, tuple)): raise InvalidTypeError(tuple, type(items)) if len(items) != len(convert_funcs): @@ -152,7 +138,7 @@ def convert_tuple(convert_funcs: List[ConvertFunctionType], items: Collection[An return tuple(convert_func(item) for convert_func, item in zip(convert_funcs, items)) -def convert_list(convert_func: ConvertFunctionType, items: List[Any]) -> List[Any]: +def convert_list(convert_func: ConvertFunctionType, items: list[Any]) -> list[Any]: if not isinstance(items, list): raise InvalidTypeError(list, type(items)) return [convert_func(item) for item in items] @@ -167,7 +153,7 @@ def convert_hex_string(item: str) -> bytes: raise ConversionError(item, bytes, e) from e -def convert_byte_type(f_type: Type[Any], item: Any) -> Any: +def convert_byte_type(f_type: type[Any], item: Any) -> Any: if isinstance(item, f_type): return item if not isinstance(item, bytes): @@ -178,7 +164,7 @@ def convert_byte_type(f_type: Type[Any], item: Any) -> Any: raise ConversionError(item, f_type, e) from e -def convert_primitive(f_type: Type[Any], item: Any) -> Any: +def convert_primitive(f_type: type[Any], item: Any) -> Any: if isinstance(item, f_type): return item try: @@ -187,7 +173,7 @@ def convert_primitive(f_type: Type[Any], item: Any) -> Any: raise ConversionError(item, f_type, e) from e -def streamable_from_dict(klass: Type[_T_Streamable], item: Any) -> _T_Streamable: +def streamable_from_dict(klass: type[_T_Streamable], item: Any) -> _T_Streamable: """ Converts a dictionary based on a dataclass, into an instance of that dataclass. Recursively goes through lists, optionals, and dictionaries. 
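[Editor's note] streamable_from_dict is the entry point the conversion helpers above serve: from_json_dict on any Streamable subclass dispatches into it, and it walks the dataclass fields applying convert_list, convert_tuple, convert_byte_type, or convert_primitive as each field type dictates. A minimal sketch under that assumption, with a hypothetical ExampleRecord class (illustrative only, not part of the patch):

    # Illustrative sketch, not part of the patch; ExampleRecord is hypothetical.
    from __future__ import annotations

    from dataclasses import dataclass

    from chia.util.ints import uint64
    from chia.util.streamable import Streamable, streamable


    @streamable
    @dataclass(frozen=True)
    class ExampleRecord(Streamable):
        amount: uint64
        memos: list[bytes]


    # from_json_dict routes through streamable_from_dict: the int is coerced
    # via convert_primitive(uint64, ...), the hex string via the bytes path.
    record = ExampleRecord.from_json_dict({"amount": 1000, "memos": ["cafe"]})
    assert record.amount == uint64(1000)
    assert record.memos == [bytes.fromhex("cafe")]
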
@@ -208,7 +194,7 @@ def streamable_from_dict(klass: Type[_T_Streamable], item: Any) -> _T_Streamable def function_to_convert_one_item( - f_type: Type[Any], json_parser: Optional[Callable[[object], Streamable]] = None + f_type: type[Any], json_parser: Optional[Callable[[object], Streamable]] = None ) -> ConvertFunctionType: if is_type_SpecificOptional(f_type): convert_inner_func = function_to_convert_one_item(get_args(f_type)[0], json_parser) @@ -237,7 +223,7 @@ def function_to_convert_one_item( return lambda item: convert_primitive(f_type, item) -def post_init_process_item(f_type: Type[Any], item: Any) -> object: +def post_init_process_item(f_type: type[Any], item: Any) -> object: if not isinstance(item, f_type): try: item = f_type(item) @@ -255,7 +241,7 @@ def post_init_process_item(f_type: Type[Any], item: Any) -> object: return item -def function_to_post_init_process_one_item(f_type: Type[object]) -> ConvertFunctionType: +def function_to_post_init_process_one_item(f_type: type[object]) -> ConvertFunctionType: if is_type_SpecificOptional(f_type): process_inner_func = function_to_post_init_process_one_item(get_args(f_type)[0]) return lambda item: convert_optional(process_inner_func, item) @@ -309,7 +295,7 @@ def recurse_jsonify( elif d is None or type(d) is str: return d elif hasattr(d, "to_json_dict"): - ret: Union[List[Any], Dict[str, Any], str, None, int] = d.to_json_dict() + ret: Union[list[Any], dict[str, Any], str, None, int] = d.to_json_dict() return ret raise UnsupportedType(f"failed to jsonify {d} (type: {type(d)})") @@ -346,7 +332,7 @@ def parse_optional(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> Option raise ValueError("Optional must be 0 or 1") -def parse_rust(f: BinaryIO, f_type: Type[Any]) -> Any: +def parse_rust(f: BinaryIO, f_type: type[Any]) -> Any: assert isinstance(f, io.BytesIO) buf = f.getbuffer() ret, advance = f_type.parse_rust(buf[f.tell() :]) @@ -361,8 +347,8 @@ def parse_bytes(f: BinaryIO) -> bytes: return bytes_read -def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> List[object]: - full_list: List[object] = [] +def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> list[object]: + full_list: list[object] = [] # wjb assert inner_type != get_args(List)[0] list_size = parse_uint32(f) for list_index in range(list_size): @@ -370,8 +356,8 @@ def parse_list(f: BinaryIO, parse_inner_type_f: ParseFunctionType) -> List[objec return full_list -def parse_tuple(f: BinaryIO, list_parse_inner_type_f: List[ParseFunctionType]) -> Tuple[object, ...]: - full_list: List[object] = [] +def parse_tuple(f: BinaryIO, list_parse_inner_type_f: list[ParseFunctionType]) -> tuple[object, ...]: + full_list: list[object] = [] for parse_f in list_parse_inner_type_f: full_list.append(parse_f(f)) return tuple(full_list) @@ -384,12 +370,12 @@ def parse_str(f: BinaryIO) -> str: return bytes.decode(str_read_bytes, "utf-8") -def function_to_parse_one_item(f_type: Type[Any]) -> ParseFunctionType: +def function_to_parse_one_item(f_type: type[Any]) -> ParseFunctionType: """ This function returns a function taking one argument `f: BinaryIO` that parses and returns a value of the given type. 
""" - inner_type: Type[Any] + inner_type: type[Any] if f_type is bool: return parse_bool if is_type_SpecificOptional(f_type): @@ -435,7 +421,7 @@ def stream_list(stream_inner_type_func: StreamFunctionType, item: Any, f: Binary stream_inner_type_func(element, f) -def stream_tuple(stream_inner_type_funcs: List[StreamFunctionType], item: Any, f: BinaryIO) -> None: +def stream_tuple(stream_inner_type_funcs: list[StreamFunctionType], item: Any, f: BinaryIO) -> None: assert len(stream_inner_type_funcs) == len(item) for i in range(len(item)): stream_inner_type_funcs[i](item[i], f) @@ -459,8 +445,8 @@ def stream_byte_convertible(item: object, f: BinaryIO) -> None: f.write(getattr(item, "__bytes__")()) -def function_to_stream_one_item(f_type: Type[Any]) -> StreamFunctionType: - inner_type: Type[Any] +def function_to_stream_one_item(f_type: type[Any]) -> StreamFunctionType: + inner_type: type[Any] if is_type_SpecificOptional(f_type): inner_type = get_args(f_type)[0] stream_inner_type_func = function_to_stream_one_item(inner_type) @@ -489,7 +475,7 @@ def function_to_stream_one_item(f_type: Type[Any]) -> StreamFunctionType: raise UnsupportedType(f"can't stream {f_type}") -def streamable(cls: Type[_T_Streamable]) -> Type[_T_Streamable]: +def streamable(cls: type[_T_Streamable]) -> type[_T_Streamable]: """ This decorator forces correct streamable protocol syntax/usage and populates the caches for types hints and (de)serialization methods for all members of the class. The correct usage is: @@ -587,7 +573,7 @@ def __post_init__(self) -> None: raise @classmethod - def parse(cls: Type[_T_Streamable], f: BinaryIO) -> _T_Streamable: + def parse(cls: type[_T_Streamable], f: BinaryIO) -> _T_Streamable: # Create the object without calling __init__() to avoid unnecessary post-init checks in strictdataclass obj: _T_Streamable = object.__new__(cls) for field in cls._streamable_fields: @@ -602,7 +588,7 @@ def get_hash(self) -> bytes32: return std_hash(bytes(self), skip_bytes_conversion=True) @classmethod - def from_bytes(cls: Type[_T_Streamable], blob: bytes) -> _T_Streamable: + def from_bytes(cls: type[_T_Streamable], blob: bytes) -> _T_Streamable: f = io.BytesIO(blob) parsed = cls.parse(f) assert f.read() == b"" @@ -624,12 +610,12 @@ def __str__(self: Any) -> str: def __repr__(self: Any) -> str: return pp.pformat(recurse_jsonify(self)) - def to_json_dict(self) -> Dict[str, Any]: - ret: Dict[str, Any] = recurse_jsonify(self) + def to_json_dict(self) -> dict[str, Any]: + ret: dict[str, Any] = recurse_jsonify(self) return ret @classmethod - def from_json_dict(cls: Type[_T_Streamable], json_dict: Dict[str, Any]) -> _T_Streamable: + def from_json_dict(cls: type[_T_Streamable], json_dict: dict[str, Any]) -> _T_Streamable: return streamable_from_dict(cls, json_dict) diff --git a/chia/util/task_timing.py b/chia/util/task_timing.py index 7225ca89b907..ac0ea1fbcf27 100644 --- a/chia/util/task_timing.py +++ b/chia/util/task_timing.py @@ -6,8 +6,9 @@ import os import sys import time +from collections.abc import Iterator from types import FrameType -from typing import Any, Dict, Iterator, List +from typing import Any # This is a development utility that instruments tasks (coroutines) and records # wall-clock time they spend in various functions. 
Since it relies on @@ -49,7 +50,7 @@ def __init__(self, duration: float) -> None: class TaskInfo: - stack: Dict[FrameType, FrameInfo] + stack: dict[FrameType, FrameInfo] stack_pos: int def __init__(self) -> None: @@ -65,7 +66,7 @@ class FunctionInfo: file: str num_calls: int duration: float - callers: Dict[str, CallInfo] + callers: dict[str, CallInfo] fun_id: int def __init__(self, name: str, file: str) -> None: @@ -81,9 +82,9 @@ def __init__(self, name: str, file: str) -> None: # maps tasks to call-treea -g_function_infos: Dict[str, Dict[str, FunctionInfo]] = {} +g_function_infos: dict[str, dict[str, FunctionInfo]] = {} -g_tasks: Dict[asyncio.Task[Any], TaskInfo] = {} +g_tasks: dict[asyncio.Task[Any], TaskInfo] = {} g_cwd = os.getcwd() + "/" @@ -348,13 +349,13 @@ def maybe_manage_task_instrumentation(enable: bool) -> Iterator[None]: yield -def main(args: List[str]) -> int: +def main(args: list[str]) -> int: import glob import pathlib import subprocess profile_dir = pathlib.Path(args[0]) - queue: List[subprocess.Popen[bytes]] = [] + queue: list[subprocess.Popen[bytes]] = [] for file in glob.glob(str(profile_dir / "*.dot")): print(file) if os.path.exists(file + ".png"): diff --git a/chia/util/timing.py b/chia/util/timing.py index 3c666fa47299..705fa059c16a 100644 --- a/chia/util/timing.py +++ b/chia/util/timing.py @@ -3,7 +3,8 @@ import os import sys import time -from typing import Callable, Iterator, Optional, overload +from collections.abc import Iterator +from typing import Callable, Optional, overload system_delays = { # based on data from https://github.com/Chia-Network/chia-blockchain/pull/13724 diff --git a/chia/util/vdf_prover.py b/chia/util/vdf_prover.py index 7af0b4beffd4..3086733115ce 100644 --- a/chia/util/vdf_prover.py +++ b/chia/util/vdf_prover.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Tuple - from chiavdf import prove from chia.consensus.constants import ConsensusConstants @@ -17,7 +15,7 @@ def get_vdf_info_and_proof( challenge_hash: bytes32, number_iters: uint64, normalized_to_identity: bool = False, -) -> Tuple[VDFInfo, VDFProof]: +) -> tuple[VDFInfo, VDFProof]: form_size = ClassgroupElement.get_size() result: bytes = prove( bytes(challenge_hash), diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py index 2b6c31c13e8a..b5bf8f33a603 100644 --- a/chia/util/virtual_project_analysis.py +++ b/chia/util/virtual_project_analysis.py @@ -7,7 +7,7 @@ import sys from dataclasses import dataclass, field from pathlib import Path -from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, Callable, Literal, Optional, Union import click import yaml @@ -52,8 +52,8 @@ def parse(cls, file_path: Path) -> ChiaFile: return cls(file_path, Annotation.parse(file_string)) -def build_dependency_graph(dir_params: DirectoryParameters) -> Dict[Path, List[Path]]: - dependency_graph: Dict[Path, List[Path]] = {} +def build_dependency_graph(dir_params: DirectoryParameters) -> dict[Path, list[Path]]: + dependency_graph: dict[Path, list[Path]] = {} for chia_file in dir_params.gather_non_empty_python_files(): dependency_graph[chia_file.path] = [] with open(chia_file.path, encoding="utf-8", errors="ignore") as f: @@ -82,14 +82,14 @@ def build_dependency_graph(dir_params: DirectoryParameters) -> Dict[Path, List[P def build_virtual_dependency_graph( - dir_params: DirectoryParameters, *, existing_graph: Optional[Dict[Path, List[Path]]] = None -) -> Dict[str, List[str]]: + dir_params: 
DirectoryParameters, *, existing_graph: Optional[dict[Path, list[Path]]] = None +) -> dict[str, list[str]]: if existing_graph is None: graph = build_dependency_graph(dir_params) else: graph = existing_graph - virtual_graph: Dict[str, List[str]] = {} + virtual_graph: dict[str, list[str]] = {} for file, imports in graph.items(): file_path = Path(file) root_file = ChiaFile.parse(file_path) @@ -113,7 +113,7 @@ class Cycle: dependent_package: str provider_path: Path provider_package: str - packages_after_provider: List[str] + packages_after_provider: list[str] def __repr__(self) -> str: return "".join( @@ -124,7 +124,7 @@ def __repr__(self) -> str: ) )[:-4] - def possible_edge_interpretations(self) -> List[Tuple[FileOrPackage, FileOrPackage]]: + def possible_edge_interpretations(self) -> list[tuple[FileOrPackage, FileOrPackage]]: edges_after_initial_files = [] provider = self.packages_after_provider[0] for next_provider in self.packages_after_provider[1:]: @@ -145,11 +145,11 @@ def possible_edge_interpretations(self) -> List[Tuple[FileOrPackage, FileOrPacka ] -def find_all_dependency_paths(dependency_graph: Dict[str, List[str]], start: str, end: str) -> List[List[str]]: +def find_all_dependency_paths(dependency_graph: dict[str, list[str]], start: str, end: str) -> list[list[str]]: all_paths = [] visited = set() - def dfs(current: str, target: str, path: List[str]) -> None: + def dfs(current: str, target: str, path: list[str]) -> None: if current in visited: return if current == target and len(path) > 0: @@ -164,13 +164,13 @@ def dfs(current: str, target: str, path: List[str]) -> None: def find_cycles( - graph: Dict[Path, List[Path]], - virtual_graph: Dict[str, List[str]], - excluded_paths: List[Path], - ignore_cycles_in: List[str], - ignore_specific_files: List[Path], - ignore_specific_edges: List[Tuple[FileOrPackage, FileOrPackage]], -) -> List[Cycle]: + graph: dict[Path, list[Path]], + virtual_graph: dict[str, list[str]], + excluded_paths: list[Path], + ignore_cycles_in: list[str], + ignore_specific_files: list[Path], + ignore_specific_edges: list[tuple[FileOrPackage, FileOrPackage]], +) -> list[Cycle]: # Initialize an accumulator for paths that are part of cycles. path_accumulator = [] # Iterate over each package (parent) in the graph. @@ -222,7 +222,7 @@ def find_cycles( return path_accumulator -def print_graph(graph: Union[Dict[str, List[str]], Dict[Path, List[Path]]]) -> None: +def print_graph(graph: Union[dict[str, list[str]], dict[Path, list[Path]]]) -> None: print(json.dumps({str(k): list(str(v) for v in vs) for k, vs in graph.items()}, indent=4)) @@ -234,9 +234,9 @@ def cli() -> None: @dataclass(frozen=True) class DirectoryParameters: dir_path: Path - excluded_paths: List[Path] = field(default_factory=list) + excluded_paths: list[Path] = field(default_factory=list) - def gather_non_empty_python_files(self) -> List[ChiaFile]: + def gather_non_empty_python_files(self) -> list[ChiaFile]: """ Gathers non-empty Python files in the specified directory while ignoring files and directories in the excluded paths. 
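[Editor's note] find_all_dependency_paths above enumerates routes between two packages in the virtual dependency graph with a recursive depth-first search. Below is a simplified, standalone variant of that idea over the same dict[str, list[str]] graph shape; it is illustrative only, not part of the patch, and it tracks the current path rather than the shared visited set the real helper uses, so it enumerates every simple path.

    # Illustrative sketch, not part of the patch.
    from __future__ import annotations


    def all_dependency_paths(graph: dict[str, list[str]], start: str, end: str) -> list[list[str]]:
        found: list[list[str]] = []

        def dfs(node: str, path: list[str]) -> None:
            if node == end and path:
                found.append([*path, node])
                return
            if node in path:  # refuse to revisit a node within the current path
                return
            for neighbor in graph.get(node, []):
                dfs(neighbor, [*path, node])

        dfs(start, [])
        return found


    # all_dependency_paths({"a": ["b", "c"], "b": ["c"]}, "a", "c")
    # -> [["a", "b", "c"], ["a", "c"]]
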
@@ -263,9 +263,9 @@ def gather_non_empty_python_files(self) -> List[ChiaFile]: @dataclass(frozen=True) class Config: directory_parameters: DirectoryParameters - ignore_cycles_in: List[str] - ignore_specific_files: List[Path] - ignore_specific_edges: List[Tuple[FileOrPackage, FileOrPackage]] # (parent, child) + ignore_cycles_in: list[str] + ignore_specific_files: list[Path] + ignore_specific_edges: list[tuple[FileOrPackage, FileOrPackage]] # (parent, child) @dataclass(frozen=True) @@ -296,7 +296,7 @@ def parse_file_or_package(identifier: str) -> FileOrPackage: return Package(identifier) -def parse_edge(user_string: str) -> Tuple[FileOrPackage, FileOrPackage]: +def parse_edge(user_string: str) -> tuple[FileOrPackage, FileOrPackage]: split_string = user_string.split("->") dependent_side = split_string[0].strip() provider_side = split_string[1].strip() @@ -329,9 +329,9 @@ def config(func: Callable[..., None]) -> Callable[..., None]: ) def inner(config_path: Optional[str], *args: Any, **kwargs: Any) -> None: exclude_paths = [] - ignore_cycles_in: List[str] = [] - ignore_specific_files: List[str] = [] - ignore_specific_edges: List[str] = [] + ignore_cycles_in: list[str] = [] + ignore_specific_files: list[str] = [] + ignore_specific_edges: list[str] = [] if config_path is not None: # Reading from the YAML configuration file with open(config_path) as file: diff --git a/chia/util/ws_message.py b/chia/util/ws_message.py index d5cffeb4f893..3d3186052abe 100644 --- a/chia/util/ws_message.py +++ b/chia/util/ws_message.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Optional +from typing import Any, Optional from typing_extensions import TypedDict @@ -19,13 +19,13 @@ class WsRpcMessage(TypedDict): command: str ack: bool - data: Dict[str, Any] + data: dict[str, Any] request_id: str destination: str origin: str -def format_response(incoming_msg: WsRpcMessage, response_data: Dict[str, Any]) -> str: +def format_response(incoming_msg: WsRpcMessage, response_data: dict[str, Any]) -> str: """ Formats the response into standard format. 
""" @@ -42,12 +42,12 @@ def format_response(incoming_msg: WsRpcMessage, response_data: Dict[str, Any]) - return json_str -def create_payload(command: str, data: Dict[str, Any], origin: str, destination: str) -> str: +def create_payload(command: str, data: dict[str, Any], origin: str, destination: str) -> str: response = create_payload_dict(command, data, origin, destination) return dict_to_json_str(response) -def create_payload_dict(command: str, data: Optional[Dict[str, Any]], origin: str, destination: str) -> WsRpcMessage: +def create_payload_dict(command: str, data: Optional[dict[str, Any]], origin: str, destination: str) -> WsRpcMessage: if data is None: data = {} @@ -61,6 +61,6 @@ def create_payload_dict(command: str, data: Optional[Dict[str, Any]], origin: st ) -def pong() -> Dict[str, Any]: +def pong() -> dict[str, Any]: response = {"success": True} return response diff --git a/chia/wallet/cat_wallet/cat_info.py b/chia/wallet/cat_wallet/cat_info.py index 1d7608c3f9c5..3c9ae91aa7d4 100644 --- a/chia/wallet/cat_wallet/cat_info.py +++ b/chia/wallet/cat_wallet/cat_info.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional, Tuple +from typing import Optional from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -35,7 +35,7 @@ class CATCoinData(Streamable): class LegacyCATInfo(Streamable): limitations_program_hash: bytes32 my_tail: Optional[Program] # this is the program - lineage_proofs: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): lineage_proof} + lineage_proofs: list[tuple[bytes32, Optional[LineageProof]]] # {coin.name(): lineage_proof} @streamable @@ -43,5 +43,5 @@ class LegacyCATInfo(Streamable): class CRCATInfo(Streamable): limitations_program_hash: bytes32 my_tail: Optional[Program] # this is the program - authorized_providers: List[bytes32] + authorized_providers: list[bytes32] proofs_checker: ProofsChecker diff --git a/chia/wallet/cat_wallet/cat_outer_puzzle.py b/chia/wallet/cat_wallet/cat_outer_puzzle.py index 4fef84036efb..58b29212c965 100644 --- a/chia/wallet/cat_wallet/cat_outer_puzzle.py +++ b/chia/wallet/cat_wallet/cat_outer_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Callable, Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -33,7 +33,7 @@ def match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]: if args is None: return None _, tail_hash, inner_puzzle = args - constructor_dict: Dict[str, Any] = { + constructor_dict: dict[str, Any] = { "type": "CAT", "tail": "0x" + tail_hash.as_atom().hex(), } @@ -74,7 +74,7 @@ def construct(self, constructor: PuzzleInfo, inner_puzzle: Program) -> Program: def solve(self, constructor: PuzzleInfo, solver: Solver, inner_puzzle: Program, inner_solution: Program) -> Program: tail_hash: bytes32 = constructor["tail"] - spendable_cats: List[SpendableCAT] = [] + spendable_cats: list[SpendableCAT] = [] target_coin: Coin ring = [ *zip( diff --git a/chia/wallet/cat_wallet/cat_utils.py b/chia/wallet/cat_wallet/cat_utils.py index 3a423de2385e..0a634426d2dd 100644 --- a/chia/wallet/cat_wallet/cat_utils.py +++ b/chia/wallet/cat_wallet/cat_utils.py @@ -1,7 +1,8 @@ from __future__ import annotations import dataclasses -from typing import Iterator, List, Optional, Union +from collections.abc import 
Iterator +from typing import Optional, Union from chia_rs import G2Element @@ -77,7 +78,7 @@ def construct_cat_puzzle( return mod_code.curry(mod_code_hash, limitations_program_hash, inner_puzzle_or_hash) -def subtotals_for_deltas(deltas: List[int]) -> List[int]: +def subtotals_for_deltas(deltas: list[int]) -> list[int]: """ Given a list of deltas corresponding to input coins, create the "subtotals" list needed in solutions spending those coins. @@ -104,7 +105,7 @@ def next_info_for_spendable_cat(spendable_cat: SpendableCAT) -> Program: # This should probably return UnsignedSpendBundle if that type ever exists def unsigned_spend_bundle_for_spendable_cats( - mod_code: Program, spendable_cat_list: List[SpendableCAT] + mod_code: Program, spendable_cat_list: list[SpendableCAT] ) -> WalletSpendBundle: """ Given a list of `SpendableCAT` objects, create a `WalletSpendBundle` that spends all those coins. @@ -114,7 +115,7 @@ def unsigned_spend_bundle_for_spendable_cats( N = len(spendable_cat_list) # figure out what the deltas are by running the inner puzzles & solutions - deltas: List[int] = [] + deltas: list[int] = [] for spend_info in spendable_cat_list: conditions = conditions_dict_for_solution(spend_info.inner_puzzle, spend_info.inner_solution, INFINITE_COST) total = spend_info.extra_delta * -1 diff --git a/chia/wallet/cat_wallet/cat_wallet.py b/chia/wallet/cat_wallet/cat_wallet.py index 1943a2390a3a..d2e6493ecee3 100644 --- a/chia/wallet/cat_wallet/cat_wallet.py +++ b/chia/wallet/cat_wallet/cat_wallet.py @@ -4,7 +4,7 @@ import logging import time import traceback -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import G1Element from typing_extensions import Unpack @@ -69,12 +69,12 @@ QUOTED_MOD_HASH = calculate_hash_of_quoted_mod_hash(CAT_MOD_HASH) -def not_ephemeral_additions(sp: WalletSpendBundle) -> List[Coin]: - removals: Set[Coin] = set() +def not_ephemeral_additions(sp: WalletSpendBundle) -> list[Coin]: + removals: set[Coin] = set() for cs in sp.coin_spends: removals.add(cs.coin) - additions: List[Coin] = [] + additions: list[Coin] = [] max_cost = int(DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) for cs in sp.coin_spends: coins, cost = compute_additions_with_cost(cs, max_cost=max_cost) @@ -106,7 +106,7 @@ def default_wallet_name_for_unknown_cat(limitations_program_hash_hex: str) -> st async def create_new_cat_wallet( wallet_state_manager: WalletStateManager, wallet: Wallet, - cat_tail_info: Dict[str, Any], + cat_tail_info: dict[str, Any], amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), @@ -160,7 +160,7 @@ async def create_new_cat_wallet( await self.set_name(name) # Change and actual CAT coin - non_ephemeral_coins: List[Coin] = not_ephemeral_additions(spend_bundle) + non_ephemeral_coins: list[Coin] = not_ephemeral_additions(spend_bundle) cat_coin = None puzzle_store = self.wallet_state_manager.puzzle_store for c in non_ephemeral_coins: @@ -257,7 +257,7 @@ async def create_from_puzzle_info( puzzle_driver: PuzzleInfo, name: Optional[str] = None, # We're hinting this as Any for mypy by should explore adding this to the wallet protocol and hinting properly - potential_subclasses: Dict[AssetType, Any] = {}, + potential_subclasses: dict[AssetType, Any] = {}, ) -> Any: next_layer: Optional[PuzzleInfo] = puzzle_driver.also() if next_layer is not None: @@ -310,7 +310,7 @@ def type(cls) -> WalletType: def id(self) -> uint32: return self.wallet_info.id - async def 
get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: if record_list is None: record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet(self.id()) @@ -323,7 +323,7 @@ async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord self.log.info(f"Confirmed balance for cat wallet {self.id()} is {amount}") return uint128(amount) - async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return await self.wallet_state_manager.get_unconfirmed_balance(self.id(), unspent_records) @property @@ -335,14 +335,14 @@ def max_send_quantity(self) -> int: # avoid full block TXs return int(self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 2 / self.cost_of_single_tx) - async def get_max_spendable_coins(self, records: Optional[Set[WalletCoinRecord]] = None) -> Set[WalletCoinRecord]: - spendable: List[WalletCoinRecord] = list( + async def get_max_spendable_coins(self, records: Optional[set[WalletCoinRecord]] = None) -> set[WalletCoinRecord]: + spendable: list[WalletCoinRecord] = list( await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records) ) spendable.sort(reverse=True, key=lambda record: record.coin.amount) return set(spendable[0 : min(len(spendable), self.max_send_quantity)]) - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(sum(cr.coin.amount for cr in await self.get_max_spendable_coins())) def get_name(self) -> str: @@ -459,7 +459,7 @@ def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32: async def get_new_cat_puzzle_hash(self) -> bytes32: return (await self.wallet_state_manager.get_unused_derivation_record(self.id())).puzzle_hash - async def get_spendable_balance(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: coins = await self.get_cat_spendable_coins(records) amount = 0 for record in coins: @@ -493,10 +493,10 @@ async def get_pending_change_balance(self) -> uint64: return uint64(addition_amount) - async def get_cat_spendable_coins(self, records: Optional[Set[WalletCoinRecord]] = None) -> List[WalletCoinRecord]: - result: List[WalletCoinRecord] = [] + async def get_cat_spendable_coins(self, records: Optional[set[WalletCoinRecord]] = None) -> list[WalletCoinRecord]: + result: list[WalletCoinRecord] = [] - record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( + record_list: set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( self.id(), records ) @@ -511,17 +511,17 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: """ Returns a set of coins that can be used for generating a new transaction. 
Note: Must be called under wallet state manager lock """ spendable_amount: uint128 = await self.get_spendable_balance() - spendable_coins: List[WalletCoinRecord] = await self.get_cat_spendable_coins() + spendable_coins: list[WalletCoinRecord] = await self.get_cat_spendable_coins() # Try to use coins from the store, if there isn't enough of "unused" # coins use change coins that are not confirmed yet - unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( + unconfirmed_removals: dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( self.id() ) async with action_scope.use() as interface: @@ -563,7 +563,7 @@ async def create_tandem_xch_tx( fee: uint64, amount_to_claim: uint64, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> Optional[AssertCoinAnnouncement]: """ This function creates a non-CAT transaction to pay fees, contribute funds for issuance, and absorb melt value. @@ -627,12 +627,12 @@ async def create_tandem_xch_tx( async def generate_unsigned_spendbundle( self, - payments: List[Payment], + payments: list[Payment], action_scope: WalletActionScope, fee: uint64 = uint64(0), - cat_discrepancy: Optional[Tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) - coins: Optional[Set[Coin]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + cat_discrepancy: Optional[tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) + coins: Optional[set[Coin]] = None, + extra_conditions: tuple[Condition, ...] = tuple(), ) -> WalletSpendBundle: if cat_discrepancy is not None: extra_delta, tail_reveal, tail_solution = cat_discrepancy @@ -756,17 +756,17 @@ async def generate_unsigned_spendbundle( async def generate_signed_transaction( self, - amounts: List[uint64], - puzzle_hashes: List[bytes32], + amounts: list[uint64], + puzzle_hashes: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, - memos: Optional[List[List[bytes]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + coins: Optional[set[Coin]] = None, + memos: Optional[list[list[bytes]]] = None, + extra_conditions: tuple[Condition, ...] 
= tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: # (extra_delta, tail_reveal, tail_solution) - cat_discrepancy: Optional[Tuple[int, Program, Program]] = kwargs.get("cat_discrepancy", None) + cat_discrepancy: Optional[tuple[int, Program, Program]] = kwargs.get("cat_discrepancy", None) if memos is None: memos = [[] for _ in range(len(puzzle_hashes))] @@ -775,7 +775,7 @@ async def generate_signed_transaction( payments = [] for amount, puzhash, memo_list in zip(amounts, puzzle_hashes, memos): - memos_with_hint: List[bytes] = [puzhash] + memos_with_hint: list[bytes] = [puzhash] memos_with_hint.extend(memo_list) payments.append(Payment(puzhash, amount, memos_with_hint)) @@ -790,10 +790,10 @@ async def generate_signed_transaction( ) async with action_scope.use() as interface: - other_tx_removals: Set[Coin] = { + other_tx_removals: set[Coin] = { removal for tx in interface.side_effects.transactions for removal in tx.removals } - other_tx_additions: Set[Coin] = { + other_tx_additions: set[Coin] = { removal for tx in interface.side_effects.transactions for removal in tx.additions } interface.side_effects.transactions.append( @@ -854,7 +854,7 @@ async def get_coins_to_offer( asset_id: Optional[bytes32], amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: balance = await self.get_confirmed_balance() if balance < amount: raise Exception(f"insufficient funds in wallet {self.id()}") diff --git a/chia/wallet/cat_wallet/dao_cat_info.py b/chia/wallet/cat_wallet/dao_cat_info.py index a40a8cafa042..322383505b32 100644 --- a/chia/wallet/cat_wallet/dao_cat_info.py +++ b/chia/wallet/cat_wallet/dao_cat_info.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -15,7 +15,7 @@ class LockedCoinInfo(Streamable): coin: Coin inner_puzzle: Program # This is the lockup puzzle, not the lockup_puzzle's inner_puzzle - active_votes: List[Optional[bytes32]] + active_votes: list[Optional[bytes32]] @streamable @@ -25,4 +25,4 @@ class DAOCATInfo(Streamable): free_cat_wallet_id: uint64 limitations_program_hash: bytes32 my_tail: Optional[Program] # this is the program - locked_coins: List[LockedCoinInfo] + locked_coins: list[LockedCoinInfo] diff --git a/chia/wallet/cat_wallet/dao_cat_wallet.py b/chia/wallet/cat_wallet/dao_cat_wallet.py index 20e413f9d256..c931cc2c38fc 100644 --- a/chia/wallet/cat_wallet/dao_cat_wallet.py +++ b/chia/wallet/cat_wallet/dao_cat_wallet.py @@ -2,7 +2,7 @@ import logging import time -from typing import TYPE_CHECKING, Any, ClassVar, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import G1Element @@ -152,7 +152,7 @@ async def coin_added(self, coin: Coin, height: uint32, peer: WSChiaConnection, c parent_spend = await fetch_coin_spend(height, parent_coin.coin, peer) uncurried = parent_spend.puzzle_reveal.uncurry() cat_inner = uncurried[1].at("rrf") - active_votes_list: List[Optional[bytes32]] = [] + active_votes_list: list[Optional[bytes32]] = [] record = await self.wallet_state_manager.puzzle_store.get_derivation_record_for_puzzle_hash(coin.puzzle_hash) if record: @@ -227,7 +227,7 @@ async def remove_lineage(self, name: bytes32) -> None: # pragma: no cover self.log.info(f"Removing parent {name} (probably had a non-CAT parent)") await self.lineage_store.remove_lineage_proof(name) - async def 
advanced_select_coins(self, amount: uint64, proposal_id: bytes32) -> List[LockedCoinInfo]: + async def advanced_select_coins(self, amount: uint64, proposal_id: bytes32) -> list[LockedCoinInfo]: coins = [] s = 0 for coin in self.dao_cat_info.locked_coins: @@ -258,7 +258,7 @@ async def create_vote_spend( is_yes_vote: bool, proposal_puzzle: Optional[Program] = None, ) -> WalletSpendBundle: - coins: List[LockedCoinInfo] = await self.advanced_select_coins(amount, proposal_id) + coins: list[LockedCoinInfo] = await self.advanced_select_coins(amount, proposal_id) running_sum = 0 # this will be used for change calculation change = sum(c.coin.amount for c in coins) - amount extra_delta, limitations_solution = 0, Program.to([]) @@ -365,8 +365,8 @@ async def enter_dao_cat_voting_mode( amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> List[TransactionRecord]: + extra_conditions: tuple[Condition, ...] = tuple(), + ) -> list[TransactionRecord]: """ Enter existing CATs for the DAO into voting mode """ @@ -378,7 +378,7 @@ async def enter_dao_cat_voting_mode( # get the lockup puzzle hash lockup_puzzle = await self.get_new_puzzle() # create the cat spend - txs: List[TransactionRecord] = await cat_wallet.generate_signed_transaction( + txs: list[TransactionRecord] = await cat_wallet.generate_signed_transaction( [amount], [lockup_puzzle.get_tree_hash()], action_scope, @@ -393,10 +393,10 @@ async def enter_dao_cat_voting_mode( async def exit_vote_state( self, - coins: List[LockedCoinInfo], + coins: list[LockedCoinInfo], action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: extra_delta, limitations_solution = 0, Program.to([]) limitations_program_reveal = Program.to([]) @@ -495,11 +495,11 @@ async def exit_vote_state( async def remove_active_proposal( self, - proposal_id_list: List[bytes32], + proposal_id_list: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), ) -> WalletSpendBundle: - locked_coins: List[Tuple[LockedCoinInfo, List[bytes32]]] = [] + locked_coins: list[tuple[LockedCoinInfo, list[bytes32]]] = [] for lci in self.dao_cat_info.locked_coins: my_finished_proposals = [] for active_vote in lci.active_votes: @@ -609,16 +609,16 @@ def require_derivation_paths(self) -> bool: async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool: raise NotImplementedError("Method not implemented for DAO CAT Wallet") # pragma: no cover - async def get_spendable_balance(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: amount = 0 for coin in self.dao_cat_info.locked_coins: amount += coin.coin.amount return uint128(amount) - async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) async def get_pending_change_balance(self) -> uint64: @@ -628,10 +628,10 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: return 
set() - async def get_max_send_amount(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) async def get_votable_balance( diff --git a/chia/wallet/cat_wallet/lineage_store.py b/chia/wallet/cat_wallet/lineage_store.py index 63ea784e007e..db99f18a9f41 100644 --- a/chia/wallet/cat_wallet/lineage_store.py +++ b/chia/wallet/cat_wallet/lineage_store.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Dict, Optional +from typing import Optional from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.db_wrapper import DBWrapper2 @@ -60,7 +60,7 @@ async def get_lineage_proof(self, coin_id: bytes32) -> Optional[LineageProof]: return None - async def get_all_lineage_proofs(self) -> Dict[bytes32, LineageProof]: + async def get_all_lineage_proofs(self) -> dict[bytes32, LineageProof]: async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute(f"SELECT * FROM {self.table_name}") rows = await cursor.fetchall() diff --git a/chia/wallet/coin_selection.py b/chia/wallet/coin_selection.py index 394be49495a0..8d83ee36057a 100644 --- a/chia/wallet/coin_selection.py +++ b/chia/wallet/coin_selection.py @@ -2,7 +2,7 @@ import logging import random -from typing import Dict, List, Optional, Set +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -14,11 +14,11 @@ async def select_coins( spendable_amount: uint128, coin_selection_config: CoinSelectionConfig, - spendable_coins: List[WalletCoinRecord], - unconfirmed_removals: Dict[bytes32, Coin], + spendable_coins: list[WalletCoinRecord], + unconfirmed_removals: dict[bytes32, Coin], log: logging.Logger, amount: uint128, -) -> Set[Coin]: +) -> set[Coin]: """ Returns a set of coins that can be used for generating a new transaction. """ @@ -33,7 +33,7 @@ async def select_coins( max_num_coins = 500 sum_spendable_coins = 0 - valid_spendable_coins: List[Coin] = [] + valid_spendable_coins: list[Coin] = [] for coin_record in spendable_coins: # remove all the unconfirmed coins, excluded coins and dust. coin_name: bytes32 = coin_record.coin.name() @@ -76,7 +76,7 @@ async def select_coins( # Check for an exact match with all of the coins smaller than the amount. # If we have more, smaller coins than the amount we run the next algorithm. smaller_coin_sum = 0 # coins smaller than target. - smaller_coins: List[Coin] = [] + smaller_coins: list[Coin] = [] for coin in valid_spendable_coins: if coin.amount < amount: smaller_coin_sum += coin.amount @@ -90,7 +90,7 @@ async def select_coins( log.debug(f"Selected closest greater coin: {smallest_coin.name()}") return {smallest_coin} elif smaller_coin_sum > amount: - coin_set: Optional[Set[Coin]] = knapsack_coin_algorithm( + coin_set: Optional[set[Coin]] = knapsack_coin_algorithm( smaller_coins, amount, coin_selection_config.max_coin_amount, max_num_coins ) log.debug(f"Selected coins from knapsack algorithm: {coin_set}") @@ -119,7 +119,7 @@ async def select_coins( # we use this to check if one of the coins exactly matches the target. 
-def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coin]: +def check_for_exact_match(coin_list: list[Coin], target: uint64) -> Optional[Coin]: for coin in coin_list: if coin.amount == target: return coin @@ -128,7 +128,7 @@ def check_for_exact_match(coin_list: List[Coin], target: uint64) -> Optional[Coi # amount of coins smaller than target, followed by a list of all valid spendable coins. # Coins must be sorted in descending amount order. -def select_smallest_coin_over_target(target: uint128, sorted_coin_list: List[Coin]) -> Optional[Coin]: +def select_smallest_coin_over_target(target: uint128, sorted_coin_list: list[Coin]) -> Optional[Coin]: if sorted_coin_list[0].amount < target: return None for coin in reversed(sorted_coin_list): @@ -140,15 +140,15 @@ def select_smallest_coin_over_target(target: uint128, sorted_coin_list: List[Coi # we use this to find the set of coins which have total value closest to the target, but at least the target. # IMPORTANT: The coins have to be sorted in descending order or else this function will not work. def knapsack_coin_algorithm( - smaller_coins: List[Coin], target: uint128, max_coin_amount: int, max_num_coins: int, seed: bytes = b"knapsack seed" -) -> Optional[Set[Coin]]: + smaller_coins: list[Coin], target: uint128, max_coin_amount: int, max_num_coins: int, seed: bytes = b"knapsack seed" +) -> Optional[set[Coin]]: best_set_sum = max_coin_amount - best_set_of_coins: Optional[Set[Coin]] = None + best_set_of_coins: Optional[set[Coin]] = None ran: random.Random = random.Random() ran.seed(seed) for i in range(1000): # reset these variables every loop. - selected_coins: Set[Coin] = set() + selected_coins: set[Coin] = set() selected_coins_sum = 0 n_pass = 0 target_reached = False @@ -177,9 +177,9 @@ def knapsack_coin_algorithm( # Adds up the largest coins in the list, resulting in the minimum number of selected coins. A solution # is guaranteed if and only if the sum(coins) >= target. Coins must be sorted in descending amount order. -def sum_largest_coins(target: uint128, sorted_coins: List[Coin]) -> Optional[Set[Coin]]: +def sum_largest_coins(target: uint128, sorted_coins: list[Coin]) -> Optional[set[Coin]]: total_value = 0 - selected_coins: Set[Coin] = set() + selected_coins: set[Coin] = set() for coin in sorted_coins: total_value += coin.amount selected_coins.add(coin) diff --git a/chia/wallet/conditions.py b/chia/wallet/conditions.py index 635ec22c50b4..57a7539ea46e 100644 --- a/chia/wallet/conditions.py +++ b/chia/wallet/conditions.py @@ -1,8 +1,9 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Iterable from dataclasses import dataclass, fields, replace -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple, Type, TypeVar, Union, final, get_type_hints +from typing import Any, Optional, TypeVar, Union, final, get_type_hints from chia_rs import G1Element from clvm.casts import int_from_bytes, int_to_bytes @@ -23,7 +24,7 @@ def to_program(self) -> Program: ... @classmethod @abstractmethod - def from_program(cls: Type[_T_Condition], program: Program) -> _T_Condition: ... + def from_program(cls: type[_T_Condition], program: Program) -> _T_Condition: ... 
@final @@ -226,7 +227,7 @@ def from_program( class CreateCoin(Condition): puzzle_hash: bytes32 amount: uint64 - memos: Optional[List[bytes]] = None + memos: Optional[list[bytes]] = None def to_program(self) -> Program: condition_args = [ConditionOpcode.CREATE_COIN, self.puzzle_hash, self.amount] @@ -736,7 +737,7 @@ def from_program(cls, program: Program) -> AssertBeforeHeightAbsolute: @dataclass(frozen=True) class Softfork(Condition): cost: uint64 - conditions: List[Program] + conditions: list[Program] def to_program(self) -> Program: condition: Program = Program.to([ConditionOpcode.SOFTFORK, self.cost, self.conditions]) @@ -772,7 +773,7 @@ def from_program(cls, program: Program) -> Remark: @dataclass(frozen=True) class UnknownCondition(Condition): opcode: Program - args: List[Program] + args: list[Program] def to_program(self) -> Program: return self.opcode.cons(Program.to(self.args)) @@ -923,15 +924,15 @@ def from_program(cls, program: Program, **kwargs: Optional[bytes32]) -> AssertAn ] -TIMELOCK_DRIVERS: Tuple[ - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], - Type[TIMELOCK_TYPES], +TIMELOCK_DRIVERS: tuple[ + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], + type[TIMELOCK_TYPES], ] = ( AssertSecondsRelative, AssertHeightRelative, @@ -942,46 +943,46 @@ def from_program(cls, program: Program, **kwargs: Optional[bytes32]) -> AssertAn AssertBeforeSecondsAbsolute, AssertBeforeHeightAbsolute, ) -SECONDS_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +SECONDS_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertSecondsRelative, AssertSecondsAbsolute, AssertBeforeSecondsRelative, AssertBeforeSecondsAbsolute, } -HEIGHT_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +HEIGHT_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertHeightRelative, AssertHeightAbsolute, AssertBeforeHeightRelative, AssertBeforeHeightAbsolute, } -AFTER_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +AFTER_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertSecondsRelative, AssertHeightRelative, AssertSecondsAbsolute, AssertHeightAbsolute, } -BEFORE_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +BEFORE_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertBeforeSecondsRelative, AssertBeforeHeightRelative, AssertBeforeSecondsAbsolute, AssertBeforeHeightAbsolute, } -RELATIVE_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +RELATIVE_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertSecondsRelative, AssertHeightRelative, AssertBeforeSecondsRelative, AssertBeforeHeightRelative, } -ABSOLUTE_TIMELOCK_DRIVERS: Set[Type[TIMELOCK_TYPES]] = { +ABSOLUTE_TIMELOCK_DRIVERS: set[type[TIMELOCK_TYPES]] = { AssertSecondsAbsolute, AssertHeightAbsolute, AssertBeforeSecondsAbsolute, AssertBeforeHeightAbsolute, } -TIMELOCK_DRIVERS_SET: Set[Type[TIMELOCK_TYPES]] = set(TIMELOCK_DRIVERS) +TIMELOCK_DRIVERS_SET: set[type[TIMELOCK_TYPES]] = set(TIMELOCK_DRIVERS) -TIMELOCK_OPCODES: Set[bytes] = { +TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_SECONDS_ABSOLUTE.value, @@ -991,37 +992,37 @@ def from_program(cls, program: Program, **kwargs: Optional[bytes32]) -> AssertAn ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_ABSOLUTE.value, } -SECONDS_TIMELOCK_OPCODES: 
Set[bytes] = { +SECONDS_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_SECONDS_ABSOLUTE.value, ConditionOpcode.ASSERT_BEFORE_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE.value, } -HEIGHT_TIMELOCK_OPCODES: Set[bytes] = { +HEIGHT_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_ABSOLUTE.value, } -AFTER_TIMELOCK_OPCODES: Set[bytes] = { +AFTER_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_SECONDS_ABSOLUTE.value, ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE.value, } -BEFORE_TIMELOCK_OPCODES: Set[bytes] = { +BEFORE_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_BEFORE_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_ABSOLUTE.value, } -RELATIVE_TIMELOCK_OPCODES: Set[bytes] = { +RELATIVE_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_HEIGHT_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_SECONDS_RELATIVE.value, ConditionOpcode.ASSERT_BEFORE_HEIGHT_RELATIVE.value, } -ABSOLUTE_TIMELOCK_OPCODES: Set[bytes] = { +ABSOLUTE_TIMELOCK_OPCODES: set[bytes] = { ConditionOpcode.ASSERT_SECONDS_ABSOLUTE.value, ConditionOpcode.ASSERT_HEIGHT_ABSOLUTE.value, ConditionOpcode.ASSERT_BEFORE_SECONDS_ABSOLUTE.value, @@ -1054,7 +1055,7 @@ def to_program(self) -> Program: else: potential_drivers -= SECONDS_TIMELOCK_DRIVERS - driver: Type[TIMELOCK_TYPES] = next(iter(potential_drivers)) + driver: type[TIMELOCK_TYPES] = next(iter(potential_drivers)) if self.seconds_not_height: # Semantics here mean that we're assuredly passing a uint64 to a class that expects it @@ -1089,7 +1090,7 @@ def from_program(cls, program: Program) -> Timelock: ) -CONDITION_DRIVERS: Dict[bytes, Type[Condition]] = { +CONDITION_DRIVERS: dict[bytes, type[Condition]] = { ConditionOpcode.AGG_SIG_PARENT.value: AggSigParent, ConditionOpcode.AGG_SIG_PUZZLE.value: AggSigPuzzle, ConditionOpcode.AGG_SIG_AMOUNT.value: AggSigAmount, @@ -1126,10 +1127,10 @@ def from_program(cls, program: Program) -> Timelock: ConditionOpcode.SOFTFORK.value: Softfork, ConditionOpcode.REMARK.value: Remark, } -DRIVERS_TO_OPCODES: Dict[Type[Condition], bytes] = {v: k for k, v in CONDITION_DRIVERS.items()} +DRIVERS_TO_OPCODES: dict[type[Condition], bytes] = {v: k for k, v in CONDITION_DRIVERS.items()} -CONDITION_DRIVERS_W_ABSTRACTIONS: Dict[bytes, Type[Condition]] = { +CONDITION_DRIVERS_W_ABSTRACTIONS: dict[bytes, type[Condition]] = { ConditionOpcode.AGG_SIG_PARENT.value: AggSig, ConditionOpcode.AGG_SIG_PUZZLE.value: AggSig, ConditionOpcode.AGG_SIG_AMOUNT.value: AggSig, @@ -1171,11 +1172,11 @@ def from_program(cls, program: Program) -> Timelock: def parse_conditions_non_consensus( conditions: Iterable[Program], abstractions: bool = True, # Use abstractions like *Announcement or Timelock instead of specific condition class -) -> List[Condition]: - driver_dictionary: Dict[bytes, Type[Condition]] = ( +) -> list[Condition]: + driver_dictionary: dict[bytes, type[Condition]] = ( CONDITION_DRIVERS_W_ABSTRACTIONS if abstractions else CONDITION_DRIVERS ) - final_condition_list: List[Condition] = [] + final_condition_list: list[Condition] = [] for condition in conditions: try: 
final_condition_list.append(driver_dictionary[condition.at("f").as_atom()].from_program(condition)) @@ -1185,8 +1186,8 @@ def parse_conditions_non_consensus( return final_condition_list -def conditions_from_json_dicts(conditions: Iterable[Dict[str, Any]]) -> List[Condition]: - final_condition_list: List[Condition] = [] +def conditions_from_json_dicts(conditions: Iterable[dict[str, Any]]) -> list[Condition]: + final_condition_list: list[Condition] = [] for condition in conditions: opcode_specified: Union[str, int] = condition["opcode"] if isinstance(opcode_specified, str): @@ -1209,7 +1210,7 @@ def conditions_from_json_dicts(conditions: Iterable[Dict[str, Any]]) -> List[Con return final_condition_list -def conditions_to_json_dicts(conditions: Iterable[Condition]) -> List[Dict[str, Any]]: +def conditions_to_json_dicts(conditions: Iterable[Condition]) -> list[dict[str, Any]]: return [ { "opcode": int_from_bytes(DRIVERS_TO_OPCODES[condition.__class__]), @@ -1231,8 +1232,8 @@ class ConditionValidTimes(Streamable): max_blocks_after_created: Optional[uint32] = None # ASSERT_BEFORE_HEIGHT_RELATIVE max_height: Optional[uint32] = None # ASSERT_BEFORE_HEIGHT_ABSOLUTE - def to_conditions(self) -> List[Condition]: - final_condition_list: List[Condition] = [] + def to_conditions(self) -> list[Condition]: + final_condition_list: list[Condition] = [] if self.min_secs_since_created is not None: final_condition_list.append(AssertSecondsRelative(self.min_secs_since_created)) if self.min_time is not None: @@ -1254,7 +1255,7 @@ def to_conditions(self) -> List[Condition]: condition_valid_times_hints = get_type_hints(ConditionValidTimes) -condition_valid_times_types: Dict[str, Type[int]] = {} +condition_valid_times_types: dict[str, type[int]] = {} for field in fields(ConditionValidTimes): hint = condition_valid_times_hints[field.name] [type_] = [type_ for type_ in hint.__args__ if type_ is not type(None)] @@ -1262,23 +1263,23 @@ def to_conditions(self) -> List[Condition]: # Properties of the dataclass above, grouped by their property -SECONDS_PROPERTIES: Set[str] = {"min_secs_since_created", "min_time", "max_secs_after_created", "max_time"} -HEIGHT_PROPERTIES: Set[str] = {"min_blocks_since_created", "min_height", "max_blocks_after_created", "max_height"} -AFTER_PROPERTIES: Set[str] = {"min_blocks_since_created", "min_height", "min_secs_since_created", "min_time"} -BEFORE_PROPERTIES: Set[str] = {"max_blocks_after_created", "max_height", "max_secs_after_created", "max_time"} -RELATIVE_PROPERTIES: Set[str] = { +SECONDS_PROPERTIES: set[str] = {"min_secs_since_created", "min_time", "max_secs_after_created", "max_time"} +HEIGHT_PROPERTIES: set[str] = {"min_blocks_since_created", "min_height", "max_blocks_after_created", "max_height"} +AFTER_PROPERTIES: set[str] = {"min_blocks_since_created", "min_height", "min_secs_since_created", "min_time"} +BEFORE_PROPERTIES: set[str] = {"max_blocks_after_created", "max_height", "max_secs_after_created", "max_time"} +RELATIVE_PROPERTIES: set[str] = { "min_blocks_since_created", "min_secs_since_created", "max_secs_after_created", "max_blocks_after_created", } -ABSOLUTE_PROPERTIES: Set[str] = {"min_time", "max_time", "min_height", "max_height"} -ALL_PROPERTIES: Set[str] = SECONDS_PROPERTIES | HEIGHT_PROPERTIES +ABSOLUTE_PROPERTIES: set[str] = {"min_time", "max_time", "min_height", "max_height"} +ALL_PROPERTIES: set[str] = SECONDS_PROPERTIES | HEIGHT_PROPERTIES def parse_timelock_info(conditions: Iterable[Condition]) -> ConditionValidTimes: valid_times: ConditionValidTimes = 
ConditionValidTimes() - properties: Set[str] = ALL_PROPERTIES.copy() + properties: set[str] = ALL_PROPERTIES.copy() for condition in conditions: if isinstance(condition, TIMELOCK_DRIVERS): timelock: Timelock = Timelock.from_program(condition.to_program()) @@ -1319,7 +1320,7 @@ def parse_timelock_info(conditions: Iterable[Condition]) -> ConditionValidTimes: new_value = timelock.timestamp final_type = condition_valid_times_types[final_property] - replacement: Dict[str, int] = {final_property: final_type(new_value)} + replacement: dict[str, int] = {final_property: final_type(new_value)} # the type is enforced above valid_times = replace(valid_times, **replacement) # type: ignore[arg-type] diff --git a/chia/wallet/dao_wallet/dao_info.py b/chia/wallet/dao_wallet/dao_info.py index 02e9b1134330..a059c84d084a 100644 --- a/chia/wallet/dao_wallet/dao_info.py +++ b/chia/wallet/dao_wallet/dao_info.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import Enum -from typing import List, Optional, Tuple +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -33,13 +33,13 @@ class DAOInfo(Streamable): treasury_id: bytes32 cat_wallet_id: uint32 dao_cat_wallet_id: uint32 - proposals_list: List[ProposalInfo] - parent_info: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof} + proposals_list: list[ProposalInfo] + parent_info: list[tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof} current_treasury_coin: Optional[Coin] current_treasury_innerpuz: Optional[Program] singleton_block_height: uint32 # the block height that the current treasury singleton was created in filter_below_vote_amount: uint64 # we ignore proposals with fewer votes than this - defaults to 1 - assets: List[Optional[bytes32]] + assets: list[Optional[bytes32]] current_height: uint64 diff --git a/chia/wallet/dao_wallet/dao_utils.py b/chia/wallet/dao_wallet/dao_utils.py index 214a0c5180ed..4c49d13e38ea 100644 --- a/chia/wallet/dao_wallet/dao_utils.py +++ b/chia/wallet/dao_wallet/dao_utils.py @@ -1,8 +1,9 @@ from __future__ import annotations import logging +from collections.abc import Iterator from itertools import chain -from typing import Any, Iterator, List, Optional, Tuple, Union +from typing import Any, Optional, Union from clvm.EvalError import EvalError @@ -217,7 +218,7 @@ def get_p2_singleton_puzhash(treasury_id: bytes32, asset_id: Optional[bytes32] = def get_lockup_puzzle( cat_tail_hash: Union[bytes32, Program], - previous_votes_list: Union[List[Optional[bytes32]], Program], + previous_votes_list: Union[list[Optional[bytes32]], Program], innerpuz: Optional[Program], ) -> Program: self_hash: Program = DAO_LOCKUP_MOD.curry( @@ -570,7 +571,7 @@ def uncurry_proposal_validator(proposal_validator_program: Program) -> Program: return curried_args -def uncurry_treasury(treasury_puzzle: Program) -> List[Program]: +def uncurry_treasury(treasury_puzzle: Program) -> list[Program]: try: mod, curried_args = treasury_puzzle.uncurry() except ValueError as e: # pragma: no cover @@ -582,7 +583,7 @@ def uncurry_treasury(treasury_puzzle: Program) -> List[Program]: return list(curried_args.as_iter()) -def uncurry_proposal(proposal_puzzle: Program) -> Tuple[Program, Program]: +def uncurry_proposal(proposal_puzzle: Program) -> tuple[Program, Program]: try: mod, curried_args = proposal_puzzle.uncurry() except ValueError as e: # pragma: no cover @@ -598,7 +599,7 @@ def uncurry_proposal(proposal_puzzle: Program) -> 
Tuple[Program, Program]: return curried_args, c_a -def uncurry_lockup(lockup_puzzle: Program) -> Tuple[Program, Program]: +def uncurry_lockup(lockup_puzzle: Program) -> tuple[Program, Program]: try: mod, curried_args = lockup_puzzle.uncurry() except ValueError as e: # pragma: no cover @@ -615,7 +616,7 @@ def uncurry_lockup(lockup_puzzle: Program) -> Tuple[Program, Program]: # This is the proposed puzzle -def get_proposal_args(puzzle: Program) -> Tuple[ProposalType, Program]: +def get_proposal_args(puzzle: Program) -> tuple[ProposalType, Program]: try: mod, curried_args = puzzle.uncurry() except ValueError as e: # pragma: no cover @@ -697,7 +698,7 @@ def match_finished_puzzle(mod: Program, curried_args: Program) -> Optional[Itera # This is used in WSM to determine whether we have a dao funding spend def match_funding_puzzle( - uncurried: UncurriedPuzzle, solution: Program, coin: Coin, dao_ids: List[bytes32] = [] + uncurried: UncurriedPuzzle, solution: Program, coin: Coin, dao_ids: list[bytes32] = [] ) -> Optional[bool]: if not dao_ids: return None @@ -744,14 +745,14 @@ def match_dao_cat_puzzle(uncurried: UncurriedPuzzle) -> Optional[Iterator[Progra def generate_simple_proposal_innerpuz( treasury_id: bytes32, - recipient_puzhashes: List[bytes32], - amounts: List[uint64], - asset_types: List[Optional[bytes32]] = [None], + recipient_puzhashes: list[bytes32], + amounts: list[uint64], + asset_types: list[Optional[bytes32]] = [None], ) -> Program: if len(recipient_puzhashes) != len(amounts) != len(asset_types): # pragma: no cover raise ValueError("Mismatch in the number of recipients, amounts, or asset types") - xch_conds: List[Any] = [] - cat_conds: List[Any] = [] + xch_conds: list[Any] = [] + cat_conds: list[Any] = [] seen_assets = set() for recipient_puzhash, amount, asset_type in zip(recipient_puzhashes, amounts, asset_types): if asset_type: diff --git a/chia/wallet/dao_wallet/dao_wallet.py b/chia/wallet/dao_wallet/dao_wallet.py index 48d830b6c6e5..2c8fb09935aa 100644 --- a/chia/wallet/dao_wallet/dao_wallet.py +++ b/chia/wallet/dao_wallet/dao_wallet.py @@ -6,7 +6,7 @@ import logging import re import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, Union, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, Union, cast from chia_rs import AugSchemeMPL, G1Element, G2Element from clvm.casts import int_from_bytes @@ -326,14 +326,14 @@ def get_parent_for_coin(self, coin: Coin) -> Optional[LineageProof]: parent_info = ccparent return parent_info - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(0) # pragma: no cover - async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: # No spendable or receivable value return uint128(1) - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: # No spendable or receivable value return uint128(1) @@ -341,7 +341,7 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: """ Returns a set of coins that can be used for generating a new transaction. 
Note: Must be called under wallet state manager lock @@ -353,13 +353,13 @@ async def select_coins( self.log.warning(f"Can't select {amount}, from spendable {spendable_amount} for wallet id {self.id()}") return set() - spendable_coins: List[WalletCoinRecord] = list( + spendable_coins: list[WalletCoinRecord] = list( await self.wallet_state_manager.get_spendable_coins_for_wallet(self.wallet_info.id) ) # Try to use coins from the store, if there isn't enough of "unused" # coins use change coins that are not confirmed yet - unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( + unconfirmed_removals: dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( self.wallet_info.id ) async with action_scope.use() as interface: @@ -379,7 +379,7 @@ async def get_pending_change_balance(self) -> uint64: # No spendable or receivable value return uint64(0) - async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: # No spendable or receivable value return uint128(1) @@ -392,7 +392,7 @@ async def get_balance_by_asset_type(self, asset_id: Optional[bytes32] = None) -> # if asset_id == None: then we get normal XCH async def select_coins_for_asset_type( self, amount: uint64, action_scope: WalletActionScope, asset_id: Optional[bytes32] = None - ) -> List[Coin]: + ) -> list[Coin]: puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=asset_id) records = await self.wallet_state_manager.coin_store.get_coin_records_by_puzzle_hash(puzhash) unspent_records = [r for r in records if not r.spent] @@ -490,7 +490,7 @@ async def resync_treasury_state(self) -> None: if len(children) == 0: break - children_state_list: List[CoinState] = [child for child in children if child.coin.amount % 2 == 1] + children_state_list: list[CoinState] = [child for child in children if child.coin.amount % 2 == 1] # ensure children_state_list has only one odd amount coin (the treasury) if (len(children_state_list) == 0) or (len(children_state_list) > 1): # pragma: no cover raise RuntimeError("Could not retrieve child_state") @@ -613,7 +613,7 @@ async def generate_new_dao( cat_tail_hash: Optional[bytes32] = None, fee: uint64 = uint64(0), fee_for_cat: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: """ Create a new DAO treasury using the dao_rules object. This does the first spend to create the launcher @@ -846,7 +846,7 @@ async def generate_new_proposal( action_scope: WalletActionScope, vote_amount: Optional[uint64] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: dao_rules = get_treasury_rules_from_puzzle(self.dao_info.current_treasury_innerpuz) coins = await self.standard_wallet.select_coins( @@ -1027,7 +1027,7 @@ async def generate_proposal_vote_spend( is_yes_vote: bool, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> None: self.log.info(f"Trying to create a proposal close spend with ID: {proposal_id}") proposal_info = None @@ -1145,7 +1145,7 @@ async def create_proposal_close_spend( genesis_id: Optional[bytes32] = None, fee: uint64 = uint64(0), self_destruct: bool = False, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: self.log.info(f"Trying to create a proposal close spend with ID: {proposal_id}") proposal_info = None @@ -1536,7 +1536,7 @@ async def _create_treasury_fund_transaction( amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: if funding_wallet.type() == WalletType.STANDARD_WALLET.value: p2_singleton_puzhash = get_p2_singleton_puzhash(self.dao_info.treasury_id, asset_id=None) @@ -1569,7 +1569,7 @@ async def create_add_funds_to_treasury_spend( action_scope: WalletActionScope, fee: uint64 = uint64(0), funding_wallet_id: uint32 = uint32(1), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: # set up the p2_singleton funding_wallet = self.wallet_state_manager.wallets[funding_wallet_id] @@ -1594,7 +1594,7 @@ async def free_coins_from_finished_proposals( self, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: dao_cat_wallet: DAOCATWallet = self.wallet_state_manager.wallets[self.dao_info.dao_cat_wallet_id] spends = [] @@ -1648,7 +1648,7 @@ async def free_coins_from_finished_proposals( async with action_scope.use() as interface: interface.side_effects.transactions.append(record) - async def parse_proposal(self, proposal_id: bytes32) -> Dict[str, Any]: + async def parse_proposal(self, proposal_id: bytes32) -> dict[str, Any]: for prop_info in self.dao_info.proposals_list: if prop_info.proposal_id == proposal_id: state = await self.get_proposal_state(proposal_id) @@ -1675,10 +1675,10 @@ async def parse_proposal(self, proposal_id: bytes32) -> Dict[str, Any]: cc = {"puzzle_hash": cond.at("rf").as_atom(), "amount": cond.at("rrf").as_int()} xch_created_coins.append(cc) - asset_create_coins: List[Dict[Any, Any]] = [] + asset_create_coins: list[dict[Any, Any]] = [] for asset in LIST_OF_TAILHASH_CONDITIONS.as_iter(): if asset == Program.to(0): # pragma: no cover - asset_dict: Optional[Dict[str, Any]] = None + asset_dict: Optional[dict[str, Any]] = None else: asset_id = asset.first().as_atom() cc_list = [] @@ -1691,7 +1691,7 @@ async def parse_proposal(self, proposal_id: bytes32) -> Dict[str, Any]: # cc_list.append([asset_id, asset_dict]) cc_list.append(asset_dict) asset_create_coins.append({"asset_id": asset_id, "conditions": cc_list}) - dictionary: Dict[str, Any] = { + dictionary: dict[str, Any] = { "state": state, "proposal_type": proposal_type.value, "proposed_puzzle_reveal": proposed_puzzle_reveal, @@ -1761,7 +1761,7 @@ async def enter_dao_cat_voting_mode( self, amount: uint64, action_scope: WalletActionScope, - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: dao_cat_wallet: DAOCATWallet = self.wallet_state_manager.wallets[self.dao_info.dao_cat_wallet_id] return await dao_cat_wallet.enter_dao_cat_voting_mode(amount, action_scope) @@ -2004,7 +2004,7 @@ async def update_closed_proposal_coin(self, new_state: CoinSpend, block_height: return index = index + 1 - async def 
get_proposal_state(self, proposal_id: bytes32) -> Dict[str, Union[int, bool]]: + async def get_proposal_state(self, proposal_id: bytes32) -> dict[str, Union[int, bool]]: """ Use this to figure out whether a proposal has passed or failed and whether it can be closed Given a proposal_id: diff --git a/chia/wallet/db_wallet/db_wallet_puzzles.py b/chia/wallet/db_wallet/db_wallet_puzzles.py index af39088670e3..b2707ea3f198 100644 --- a/chia/wallet/db_wallet/db_wallet_puzzles.py +++ b/chia/wallet/db_wallet/db_wallet_puzzles.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Iterator, List, Tuple, Union +from collections.abc import Iterator +from typing import Union from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.serialized_program import SerializedProgram @@ -40,7 +41,7 @@ def create_host_layer_puzzle(innerpuz: Union[Program, bytes32], current_root: by ) -def match_dl_singleton(puzzle: Union[Program, SerializedProgram]) -> Tuple[bool, Iterator[Program]]: +def match_dl_singleton(puzzle: Union[Program, SerializedProgram]) -> tuple[bool, Iterator[Program]]: """ Given a puzzle test if it's a CAT and, if it is, return the curried arguments """ @@ -56,7 +57,7 @@ def match_dl_singleton(puzzle: Union[Program, SerializedProgram]) -> Tuple[bool, return False, iter(()) -def launch_solution_to_singleton_info(launch_solution: Program) -> Tuple[bytes32, uint64, bytes32, bytes32]: +def launch_solution_to_singleton_info(launch_solution: Program) -> tuple[bytes32, uint64, bytes32, bytes32]: solution = launch_solution.as_python() try: full_puzzle_hash = bytes32(solution[0]) @@ -77,7 +78,7 @@ def launcher_to_struct(launcher_id: bytes32) -> Program: def create_graftroot_offer_puz( - launcher_ids: List[bytes32], values_to_prove: List[List[bytes32]], inner_puzzle: Program + launcher_ids: list[bytes32], values_to_prove: list[list[bytes32]], inner_puzzle: Program ) -> Program: return GRAFTROOT_DL_OFFERS.curry( inner_puzzle, @@ -96,7 +97,7 @@ def create_mirror_puzzle() -> Program: def get_mirror_info( parent_puzzle: Union[Program, SerializedProgram], parent_solution: Union[Program, SerializedProgram] -) -> Tuple[bytes32, List[bytes]]: +) -> tuple[bytes32, list[bytes]]: assert type(parent_puzzle) is type(parent_solution) _, conditions = parent_puzzle.run_with_cost(INFINITE_COST, parent_solution) for condition in conditions.as_iter(): @@ -104,7 +105,7 @@ def get_mirror_info( condition.first().as_python() == ConditionOpcode.CREATE_COIN and condition.at("rf").as_python() == create_mirror_puzzle().get_tree_hash() ): - memos: List[bytes] = condition.at("rrrf").as_python() + memos: list[bytes] = condition.at("rrrf").as_python() launcher_id = bytes32(memos[0]) return launcher_id, [url for url in memos[1:]] raise ValueError("The provided puzzle and solution do not create a mirror coin") diff --git a/chia/wallet/derive_keys.py b/chia/wallet/derive_keys.py index 0f20a223f1f0..fac41b6b6874 100644 --- a/chia/wallet/derive_keys.py +++ b/chia/wallet/derive_keys.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Set, Tuple +from typing import Optional from chia_rs import AugSchemeMPL, G1Element, PrivateKey @@ -18,19 +18,19 @@ MAX_POOL_WALLETS = 100 -def _derive_path(sk: PrivateKey, path: List[int]) -> PrivateKey: +def _derive_path(sk: PrivateKey, path: list[int]) -> PrivateKey: for index in path: sk = AugSchemeMPL.derive_child_sk(sk, index) return sk -def _derive_path_unhardened(sk: PrivateKey, path: List[int]) -> 
PrivateKey: +def _derive_path_unhardened(sk: PrivateKey, path: list[int]) -> PrivateKey: for index in path: sk = AugSchemeMPL.derive_child_sk_unhardened(sk, index) return sk -def _derive_pk_unhardened(pk: G1Element, path: List[int]) -> G1Element: +def _derive_pk_unhardened(pk: G1Element, path: list[int]) -> G1Element: for index in path: pk = AugSchemeMPL.derive_child_pk_unhardened(pk, index) return pk @@ -95,7 +95,7 @@ def master_sk_to_pooling_authentication_sk(master: PrivateKey, pool_wallet_index return _derive_path(master, [12381, 8444, 6, pool_wallet_index * 10000 + index]) -def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tuple[PrivateKey, uint32]]: +def find_owner_sk(all_sks: list[PrivateKey], owner_pk: G1Element) -> Optional[tuple[PrivateKey, uint32]]: for pool_wallet_index in range(MAX_POOL_WALLETS): for sk in all_sks: try_owner_sk = master_sk_to_singleton_owner_sk(sk, uint32(pool_wallet_index)) @@ -104,7 +104,7 @@ def find_owner_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[Tu return None -def find_authentication_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Optional[PrivateKey]: +def find_authentication_sk(all_sks: list[PrivateKey], owner_pk: G1Element) -> Optional[PrivateKey]: # NOTE: might need to increase this if using a large number of wallets, or have switched authentication keys # many times. for pool_wallet_index in range(MAX_POOL_WALLETS): @@ -117,8 +117,8 @@ def find_authentication_sk(all_sks: List[PrivateKey], owner_pk: G1Element) -> Op def match_address_to_sk( - sk: PrivateKey, addresses_to_search: List[bytes32], max_ph_to_search: int = 500 -) -> Set[bytes32]: + sk: PrivateKey, addresses_to_search: list[bytes32], max_ph_to_search: int = 500 +) -> set[bytes32]: """ Checks the list of given address is a derivation of the given sk within the given number of derivations Returns a Set of the addresses that are derivations of the given sk @@ -126,8 +126,8 @@ def match_address_to_sk( if sk is None or not addresses_to_search: return set() - found_addresses: Set[bytes32] = set() - search_list: Set[bytes32] = set(addresses_to_search) + found_addresses: set[bytes32] = set() + search_list: set[bytes32] = set(addresses_to_search) for i in range(max_ph_to_search): phs = [ diff --git a/chia/wallet/did_wallet/did_info.py b/chia/wallet/did_wallet/did_info.py index 12d5e3653d7d..825f681d54dd 100644 --- a/chia/wallet/did_wallet/did_info.py +++ b/chia/wallet/did_wallet/did_info.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional, Tuple +from typing import Optional from chia.protocols.wallet_protocol import CoinState from chia.types.blockchain_format.coin import Coin @@ -16,9 +16,9 @@ @dataclass(frozen=True) class DIDInfo(Streamable): origin_coin: Optional[Coin] # Coin ID of this coin is our DID - backup_ids: List[bytes32] + backup_ids: list[bytes32] num_of_backup_ids_needed: uint64 - parent_info: List[Tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof} + parent_info: list[tuple[bytes32, Optional[LineageProof]]] # {coin.name(): LineageProof} current_inner: Optional[Program] # represents a Program as bytes temp_coin: Optional[Coin] # partially recovered wallet uses these to hold info temp_puzhash: Optional[bytes32] diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py index 28cb5875199e..bb3ae4ed6902 100644 --- a/chia/wallet/did_wallet/did_wallet.py +++ b/chia/wallet/did_wallet/did_wallet.py @@ -5,7 +5,7 @@ import 
logging import re import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import AugSchemeMPL, G1Element, G2Element @@ -77,12 +77,12 @@ async def create_new_did_wallet( wallet: Wallet, amount: uint64, action_scope: WalletActionScope, - backups_ids: List[bytes32] = [], + backups_ids: list[bytes32] = [], num_of_backup_ids_needed: uint64 = None, - metadata: Dict[str, str] = {}, + metadata: dict[str, str] = {}, name: Optional[str] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ): """ Create a brand new DID wallet @@ -226,7 +226,7 @@ async def create_new_did_wallet_from_coin_spend( _, recovery_list_hash, num_verification, _, metadata = args full_solution: Program = Program.from_bytes(bytes(coin_spend.solution)) inner_solution: Program = full_solution.rest().rest().first() - recovery_list: List[bytes32] = [] + recovery_list: list[bytes32] = [] backup_required: int = num_verification.as_int() if recovery_list_hash != NIL_TREEHASH: try: @@ -340,7 +340,7 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: try: async with action_scope.use() as interface: coin = await self.get_coin() @@ -571,7 +571,7 @@ async def create_update_spend( self, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: assert self.did_info.current_inner is not None assert self.did_info.origin_coin is not None @@ -670,7 +670,7 @@ async def transfer_did( fee: uint64, with_recovery: bool, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: """ Transfer the current DID to another owner @@ -759,7 +759,7 @@ async def transfer_did( async def create_message_spend( self, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: assert self.did_info.current_inner is not None assert self.did_info.origin_coin is not None @@ -892,8 +892,8 @@ async def create_attestment( newpuz: bytes32, pubkey: G1Element, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Tuple[WalletSpendBundle, str]: + extra_conditions: tuple[Condition, ...] 
= tuple(), + ) -> tuple[WalletSpendBundle, str]: """ Create an attestment TODO: @@ -971,7 +971,7 @@ async def create_attestment( attest_str += f"{self.did_info.current_inner.get_tree_hash().hex()}:{coin.amount}" return message_spend_bundle, attest_str - async def get_info_for_recovery(self) -> Optional[Tuple[bytes32, bytes32, uint64]]: + async def get_info_for_recovery(self) -> Optional[tuple[bytes32, bytes32, uint64]]: assert self.did_info.current_inner is not None assert self.did_info.origin_coin is not None try: @@ -983,7 +983,7 @@ async def get_info_for_recovery(self) -> Optional[Tuple[bytes32, bytes32, uint64 amount = uint64(coin.amount) return (parent, innerpuzhash, amount) - async def load_attest_files_for_recovery_spend(self, attest_data: List[str]) -> Tuple[List, WalletSpendBundle]: + async def load_attest_files_for_recovery_spend(self, attest_data: list[str]) -> tuple[list, WalletSpendBundle]: spend_bundle_list = [] info_dict = {} for attest in attest_data: @@ -996,7 +996,7 @@ async def load_attest_files_for_recovery_spend(self, attest_data: List[str]) -> new_sb = WalletSpendBundle.from_bytes(bytes.fromhex(info[1])) spend_bundle_list.append(new_sb) # info_dict {0xidentity: "(0xparent_info 0xinnerpuz amount)"} - my_recovery_list: List[bytes32] = self.did_info.backup_ids + my_recovery_list: list[bytes32] = self.did_info.backup_ids # convert info dict into recovery list - same order as wallet info_list = [] @@ -1018,7 +1018,7 @@ async def recovery_spend( self, coin: Coin, puzhash: bytes32, - parent_innerpuzhash_amounts_for_recovery_ids: List[Tuple[bytes, bytes, int]], + parent_innerpuzhash_amounts_for_recovery_ids: list[tuple[bytes, bytes, int]], pubkey: G1Element, spend_bundle: WalletSpendBundle, action_scope: WalletActionScope, @@ -1176,7 +1176,7 @@ def get_parent_for_coin(self, coin) -> Optional[LineageProof]: return parent_info - async def sign_message(self, message: str, mode: SigningMode) -> Tuple[G1Element, G2Element]: + async def sign_message(self, message: str, mode: SigningMode) -> tuple[G1Element, G2Element]: if self.did_info.current_inner is None: raise ValueError("Missing DID inner puzzle.") puzzle_args = did_wallet_puzzles.uncurry_innerpuz(self.did_info.current_inner) @@ -1203,7 +1203,7 @@ async def generate_new_decentralised_id( amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: """ This must be called under the wallet state manager lock @@ -1300,7 +1300,7 @@ async def generate_eve_spend( coin: Coin, full_puzzle: Program, innerpuz: Program, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ): assert self.did_info.origin_coin is not None uncurried = did_wallet_puzzles.uncurry_innerpuz(innerpuz) @@ -1330,8 +1330,8 @@ async def get_spendable_balance(self, unspent_records=None) -> uint128: ) return spendable_am - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None): - spendable: List[WalletCoinRecord] = list( + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None): + spendable: list[WalletCoinRecord] = list( await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records) ) max_send_amount = sum(cr.coin.amount for cr in spendable) @@ -1355,7 +1355,7 @@ async def add_parent(self, name: bytes32, parent: Optional[LineageProof]): ) await self.save_info(did_info) - async def update_recovery_list(self, recover_list: List[bytes32], num_of_backup_ids_needed: uint64) -> bool: + async def update_recovery_list(self, recover_list: list[bytes32], num_of_backup_ids_needed: uint64) -> bool: if num_of_backup_ids_needed > len(recover_list): return False did_info = DIDInfo( @@ -1374,7 +1374,7 @@ async def update_recovery_list(self, recover_list: List[bytes32], num_of_backup_ await self.wallet_state_manager.update_wallet_puzzle_hashes(self.wallet_info.id) return True - async def update_metadata(self, metadata: Dict[str, str]) -> bool: + async def update_metadata(self, metadata: dict[str, str]) -> bool: # validate metadata if not all(isinstance(k, str) and isinstance(v, str) for k, v in metadata.items()): raise ValueError("Metadata key value pairs must be strings.") @@ -1468,7 +1468,7 @@ def require_derivation_paths(self) -> bool: return True async def get_coin(self) -> Coin: - spendable_coins: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( + spendable_coins: set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( self.wallet_info.id ) if len(spendable_coins) == 0: diff --git a/chia/wallet/did_wallet/did_wallet_puzzles.py b/chia/wallet/did_wallet/did_wallet_puzzles.py index 2308cdf5cb85..c3b8b593cf55 100644 --- a/chia/wallet/did_wallet/did_wallet_puzzles.py +++ b/chia/wallet/did_wallet/did_wallet_puzzles.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Dict, Iterator, List, Optional, Tuple, Union +from collections.abc import Iterator +from typing import Optional, Union from chia_rs import G1Element @@ -40,7 +41,7 @@ def create_innerpuz( p2_puzzle_or_hash: Union[Program, bytes32], - recovery_list: List[bytes32], + recovery_list: list[bytes32], num_of_backup_ids_needed: uint64, launcher_id: bytes32, metadata: Program = Program.to([]), @@ -69,7 +70,7 @@ def create_innerpuz( def get_inner_puzhash_by_p2( p2_puzhash: bytes32, - recovery_list: List[bytes32], + recovery_list: list[bytes32], num_of_backup_ids_needed: uint64, launcher_id: bytes32, metadata: Program = Program.to([]), @@ -111,7 +112,7 @@ def is_did_innerpuz(inner_f: Program) -> bool: return inner_f == DID_INNERPUZ_MOD -def uncurry_innerpuz(puzzle: Program) -> Optional[Tuple[Program, Program, Program, Program, Program]]: +def uncurry_innerpuz(puzzle: Program) -> Optional[tuple[Program, Program, Program, Program, Program]]: """ Uncurry a DID inner puzzle :param puzzle: DID puzzle @@ -196,7 +197,7 @@ def check_is_did_puzzle(puzzle: Program) -> bool: return is_singleton(inner_f) -def metadata_to_program(metadata: Dict[str, str]) -> Program: +def metadata_to_program(metadata: dict[str, str]) -> Program: """ Convert the metadata dict to a Chialisp program :param 
metadata: User defined metadata @@ -208,7 +209,7 @@ def metadata_to_program(metadata: Dict[str, str]) -> Program: return Program.to(kv_list) -def did_program_to_metadata(program: Program) -> Dict[str, str]: +def did_program_to_metadata(program: Program) -> dict[str, str]: """ Convert a program to a metadata dict :param program: Chialisp program contains the metadata diff --git a/chia/wallet/lineage_proof.py b/chia/wallet/lineage_proof.py index 59a3477315ec..a5a0d37fa2d2 100644 --- a/chia/wallet/lineage_proof.py +++ b/chia/wallet/lineage_proof.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import Enum -from typing import Any, Dict, List, Optional +from typing import Any, Optional from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -24,8 +24,8 @@ class LineageProof(Streamable): amount: Optional[uint64] = None @classmethod - def from_program(cls, program: Program, fields: List[LineageProofField]) -> LineageProof: - lineage_proof_info: Dict[str, Any] = {} + def from_program(cls, program: Program, fields: list[LineageProofField]) -> LineageProof: + lineage_proof_info: dict[str, Any] = {} field_iter = iter(fields) program_iter = program.as_iter() for program_value in program_iter: @@ -45,7 +45,7 @@ def from_program(cls, program: Program, fields: List[LineageProofField]) -> Line return LineageProof(**lineage_proof_info) def to_program(self) -> Program: - final_list: List[Any] = [] + final_list: list[Any] = [] if self.parent_name is not None: final_list.append(self.parent_name) if self.inner_puzzle_hash is not None: diff --git a/chia/wallet/nft_wallet/metadata_outer_puzzle.py b/chia/wallet/nft_wallet/metadata_outer_puzzle.py index 1dc0fd65cede..0545227c007f 100644 --- a/chia/wallet/nft_wallet/metadata_outer_puzzle.py +++ b/chia/wallet/nft_wallet/metadata_outer_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Callable, List, Optional, Tuple +from typing import Callable, Optional from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -15,7 +15,7 @@ NFT_STATE_LAYER_MOD_HASH = NFT_STATE_LAYER_MOD.get_tree_hash() -def match_metadata_layer_puzzle(puzzle: UncurriedPuzzle) -> Tuple[bool, List[Program]]: +def match_metadata_layer_puzzle(puzzle: UncurriedPuzzle) -> tuple[bool, list[Program]]: if puzzle.mod == NFT_STATE_LAYER_MOD: return True, list(puzzle.args.as_iter()) return False, [] diff --git a/chia/wallet/nft_wallet/nft_info.py b/chia/wallet/nft_wallet/nft_info.py index b70571433a53..1d54458ab5b3 100644 --- a/chia/wallet/nft_wallet/nft_info.py +++ b/chia/wallet/nft_wallet/nft_info.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List, Optional +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -40,19 +40,19 @@ class NFTInfo(Streamable): royalty_puzzle_hash: Optional[bytes32] """Puzzle hash where royalty will be sent to""" - data_uris: List[str] + data_uris: list[str] """ A list of content URIs""" data_hash: bytes """Hash of the content""" - metadata_uris: List[str] + metadata_uris: list[str] """A list of metadata URIs""" metadata_hash: bytes """Hash of the metadata""" - license_uris: List[str] + license_uris: list[str] """A list of license URIs""" license_hash: bytes diff --git a/chia/wallet/nft_wallet/nft_puzzles.py 
b/chia/wallet/nft_wallet/nft_puzzles.py index 4acdbac52cbf..90dc670e7570 100644 --- a/chia/wallet/nft_wallet/nft_puzzles.py +++ b/chia/wallet/nft_wallet/nft_puzzles.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, Dict, List, Literal, Optional, Tuple, Union +from typing import Any, Literal, Optional, Union from clvm_tools.binutils import disassemble @@ -86,7 +86,7 @@ def create_full_puzzle( return full_puzzle -async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: Dict[str, Any]) -> NFTInfo: +async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: dict[str, Any]) -> NFTInfo: """ Extract NFT info from a full puzzle :param nft_coin_info NFTCoinInfo in local database @@ -96,14 +96,14 @@ async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: Dict[str, """ uncurried_nft: Optional[UncurriedNFT] = UncurriedNFT.uncurry(*nft_coin_info.full_puzzle.uncurry()) assert uncurried_nft is not None - data_uris: List[str] = [] + data_uris: list[str] = [] for uri in uncurried_nft.data_uris.as_python(): # pylint: disable=E1133 data_uris.append(str(uri, "utf-8")) - meta_uris: List[str] = [] + meta_uris: list[str] = [] for uri in uncurried_nft.meta_uris.as_python(): # pylint: disable=E1133 meta_uris.append(str(uri, "utf-8")) - license_uris: List[str] = [] + license_uris: list[str] = [] for uri in uncurried_nft.license_uris.as_python(): # pylint: disable=E1133 license_uris.append(str(uri, "utf-8")) off_chain_metadata: Optional[str] = None @@ -135,7 +135,7 @@ async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: Dict[str, return nft_info -def metadata_to_program(metadata: Dict[bytes, Any]) -> Program: +def metadata_to_program(metadata: dict[bytes, Any]) -> Program: """ Convert the metadata dict to a Chialisp program :param metadata: User defined metadata @@ -148,7 +148,7 @@ def metadata_to_program(metadata: Dict[bytes, Any]) -> Program: return program -def nft_program_to_metadata(program: Program) -> Dict[bytes, Any]: +def nft_program_to_metadata(program: Program) -> dict[bytes, Any]: """ Convert a program to a metadata dict :param program: Chialisp program contains the metadata @@ -160,7 +160,7 @@ def nft_program_to_metadata(program: Program) -> Dict[bytes, Any]: return metadata -def prepend_value(key: bytes, value: Program, metadata: Dict[bytes, Any]) -> None: +def prepend_value(key: bytes, value: Program, metadata: dict[bytes, Any]) -> None: """ Prepend a value to a list in the metadata :param key: Key of the field @@ -182,7 +182,7 @@ def update_metadata(metadata: Program, update_condition: Program) -> Program: :param update_condition: Update metadata conditions :return: Updated metadata """ - new_metadata: Dict[bytes, Any] = nft_program_to_metadata(metadata) + new_metadata: dict[bytes, Any] = nft_program_to_metadata(metadata) uri: Program = update_condition.rest().rest().first() prepend_value(uri.first().as_python(), uri.rest(), new_metadata) return metadata_to_program(new_metadata) @@ -223,7 +223,7 @@ def create_ownership_layer_puzzle( def create_ownership_layer_transfer_solution( - new_did: bytes, new_did_inner_hash: bytes, trade_prices_list: List[List[int]], new_puzhash: bytes32 + new_did: bytes, new_did_inner_hash: bytes, trade_prices_list: list[list[int]], new_puzhash: bytes32 ) -> Program: log.debug( "Creating a transfer solution with: DID:%s Inner_puzhash:%s trade_price:%s puzhash:%s", @@ -239,7 +239,7 @@ def create_ownership_layer_transfer_solution( return solution -def 
get_metadata_and_phs(unft: UncurriedNFT, solution: SerializedProgram) -> Tuple[Program, bytes32]: +def get_metadata_and_phs(unft: UncurriedNFT, solution: SerializedProgram) -> tuple[Program, bytes32]: conditions = unft.p2_puzzle.run(unft.get_innermost_solution(solution.to_program())) metadata = unft.metadata puzhash_for_derivation: Optional[bytes32] = None diff --git a/chia/wallet/nft_wallet/nft_wallet.py b/chia/wallet/nft_wallet/nft_wallet.py index e248ad432fdd..f48faad72cab 100644 --- a/chia/wallet/nft_wallet/nft_wallet.py +++ b/chia/wallet/nft_wallet/nft_wallet.py @@ -5,7 +5,7 @@ import logging import math import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, Type, TypeVar, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, TypeVar, cast from chia_rs import AugSchemeMPL, G1Element, G2Element from clvm.casts import int_from_bytes, int_to_bytes @@ -81,7 +81,7 @@ def did_id(self) -> Optional[bytes32]: @classmethod async def create_new_nft_wallet( - cls: Type[_T_NFTWallet], + cls: type[_T_NFTWallet], wallet_state_manager: Any, wallet: Wallet, did_id: Optional[bytes32] = None, @@ -111,7 +111,7 @@ async def create_new_nft_wallet( @classmethod async def create( - cls: Type[_T_NFTWallet], + cls: type[_T_NFTWallet], wallet_state_manager: Any, wallet: Wallet, wallet_info: WalletInfo, @@ -138,22 +138,22 @@ def id(self) -> uint32: def get_did(self) -> Optional[bytes32]: return self.did_id - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: """The NFT wallet doesn't really have a balance.""" return uint128(0) - async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: """The NFT wallet doesn't really have a balance.""" return uint128(0) - async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: """The NFT wallet doesn't really have a balance.""" return uint128(0) async def get_pending_change_balance(self) -> uint64: return uint64(0) - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: """This is the confirmed balance, which we set to 0 as the NFT wallet doesn't have one.""" return uint128(0) @@ -199,7 +199,7 @@ async def puzzle_solution_received(self, coin: Coin, data: NFTCoinData, peer: WS self.log.debug("Not our NFT, pointing to %s, skipping", p2_puzzle_hash) return p2_puzzle = puzzle_for_pk(derivation_record.pubkey) - launcher_coin_states: List[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( + launcher_coin_states: list[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( [singleton_id], peer=peer ) assert ( @@ -298,7 +298,7 @@ async def remove_coin(self, coin: Coin, height: uint32) -> None: async def get_did_approval_info( self, - nft_ids: List[bytes32], + nft_ids: list[bytes32], action_scope: WalletActionScope, did_id: Optional[bytes32] = None, ) -> bytes32: @@ -334,7 +334,7 @@ async def generate_new_nft( percentage: uint16 = uint16(0), did_id: Optional[bytes] = None, fee: uint64 = 
uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> bytes32: """ This must be called under the wallet state manager lock @@ -443,7 +443,7 @@ async def update_metadata( uri: str, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: uncurried_nft = UncurriedNFT.uncurry(*nft_coin_info.full_puzzle.uncurry()) assert uncurried_nft is not None @@ -466,7 +466,7 @@ async def update_metadata( await self.update_coin_status(nft_coin_info.coin.name(), True) self.wallet_state_manager.state_changed("nft_coin_updated", self.wallet_info.id) - async def get_current_nfts(self, start_index: int = 0, count: int = 50) -> List[NFTCoinInfo]: + async def get_current_nfts(self, start_index: int = 0, count: int = 50) -> list[NFTCoinInfo]: return await self.nft_store.get_nft_list(wallet_id=self.id(), start_index=start_index, count=count) async def get_nft_count(self) -> int: @@ -502,7 +502,7 @@ async def get_puzzle_info(self, nft_id: bytes32) -> PuzzleInfo: else: return puzzle_info - async def sign_message(self, message: str, nft: NFTCoinInfo, mode: SigningMode) -> Tuple[G1Element, G2Element]: + async def sign_message(self, message: str, nft: NFTCoinInfo, mode: SigningMode) -> tuple[G1Element, G2Element]: uncurried_nft = UncurriedNFT.uncurry(*nft.full_puzzle.uncurry()) if uncurried_nft is not None: p2_puzzle = uncurried_nft.p2_puzzle @@ -527,7 +527,7 @@ async def get_coins_to_offer( nft_id: bytes32, *args: Any, **kwargs: Any, - ) -> Set[Coin]: + ) -> set[Coin]: nft_coin: Optional[NFTCoinInfo] = await self.get_nft(nft_id) if nft_coin is None: raise ValueError("An asset ID was specified that this wallet doesn't track") @@ -565,21 +565,21 @@ async def create_from_puzzle_info( async def generate_signed_transaction( self, - amounts: List[uint64], - puzzle_hashes: List[bytes32], + amounts: list[uint64], + puzzle_hashes: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, - memos: Optional[List[List[bytes]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + coins: Optional[set[Coin]] = None, + memos: Optional[list[list[bytes]]] = None, + extra_conditions: tuple[Condition, ...] 
= tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: nft_coin: Optional[NFTCoinInfo] = kwargs.get("nft_coin", None) new_owner: Optional[bytes] = kwargs.get("new_owner", None) new_did_inner_hash: Optional[bytes] = kwargs.get("new_did_inner_hash", None) trade_prices_list: Optional[Program] = kwargs.get("trade_prices_list", None) - additional_bundles: List[WalletSpendBundle] = kwargs.get("additional_bundles", []) - metadata_update: Optional[Tuple[str, str]] = kwargs.get("metadata_update", None) + additional_bundles: list[WalletSpendBundle] = kwargs.get("additional_bundles", []) + metadata_update: Optional[tuple[str, str]] = kwargs.get("metadata_update", None) if memos is None: memos = [[] for _ in range(len(puzzle_hashes))] @@ -588,7 +588,7 @@ async def generate_signed_transaction( payments = [] for amount, puzhash, memo_list in zip(amounts, puzzle_hashes, memos): - memos_with_hint: List[bytes] = [puzhash] + memos_with_hint: list[bytes] = [puzhash] memos_with_hint.extend(memo_list) payments.append(Payment(puzhash, amount, memos_with_hint)) @@ -608,10 +608,10 @@ async def generate_signed_transaction( spend_bundle = WalletSpendBundle.aggregate([unsigned_spend_bundle] + additional_bundles) async with action_scope.use() as interface: - other_tx_removals: Set[Coin] = { + other_tx_removals: set[Coin] = { removal for tx in interface.side_effects.transactions for removal in tx.removals } - other_tx_additions: Set[Coin] = { + other_tx_additions: set[Coin] = { addition for tx in interface.side_effects.transactions for addition in tx.additions } tx = TransactionRecord( @@ -638,16 +638,16 @@ async def generate_signed_transaction( async def generate_unsigned_spendbundle( self, - payments: List[Payment], + payments: list[Payment], action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, + coins: Optional[set[Coin]] = None, new_owner: Optional[bytes] = None, new_did_inner_hash: Optional[bytes] = None, trade_prices_list: Optional[Program] = None, - metadata_update: Optional[Tuple[str, str]] = None, + metadata_update: Optional[tuple[str, str]] = None, nft_coin: Optional[NFTCoinInfo] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> WalletSpendBundle: if nft_coin is None: if coins is None or not len(coins) == 1: @@ -720,10 +720,10 @@ async def generate_unsigned_spendbundle( @staticmethod def royalty_calculation( - royalty_assets_dict: Dict[Any, Tuple[Any, uint16]], - fungible_asset_dict: Dict[Any, uint64], - ) -> Dict[Any, List[Dict[str, Any]]]: - summary_dict: Dict[Any, List[Dict[str, Any]]] = {} + royalty_assets_dict: dict[Any, tuple[Any, uint16]], + fungible_asset_dict: dict[Any, uint64], + ) -> dict[Any, list[dict[str, Any]]]: + summary_dict: dict[Any, list[dict[str, Any]]] = {} for id, royalty_info in royalty_assets_dict.items(): address, percentage = royalty_info summary_dict[id] = [] @@ -741,14 +741,14 @@ def royalty_calculation( @staticmethod async def make_nft1_offer( wallet_state_manager: Any, - offer_dict: Dict[Optional[bytes32], int], - driver_dict: Dict[bytes32, PuzzleInfo], + offer_dict: dict[Optional[bytes32], int], + driver_dict: dict[bytes32, PuzzleInfo], action_scope: WalletActionScope, fee: uint64, - extra_conditions: Tuple[Condition, ...], + extra_conditions: tuple[Condition, ...], ) -> Offer: # First, let's take note of all the royalty enabled NFTs - royalty_nft_asset_dict: Dict[bytes32, int] = {} + royalty_nft_asset_dict: dict[bytes32, int] = {} for asset, amount in offer_dict.items(): if asset is not None and driver_dict[asset].check_type( # check if asset is an Royalty Enabled NFT [ @@ -761,7 +761,7 @@ async def make_nft1_offer( royalty_nft_asset_dict[asset] = amount # Then, all of the things that trigger royalties - fungible_asset_dict: Dict[Optional[bytes32], int] = {} + fungible_asset_dict: dict[Optional[bytes32], int] = {} for asset, amount in offer_dict.items(): if asset is None or driver_dict[asset].type() != AssetType.SINGLETON.value: fungible_asset_dict[asset] = amount @@ -775,7 +775,7 @@ async def make_nft1_offer( elif amount < 0: offer_side_royalty_split += 1 - trade_prices: List[Tuple[uint64, bytes32]] = [] + trade_prices: list[tuple[uint64, bytes32]] = [] for asset, amount in fungible_asset_dict.items(): # requested fungible items if amount > 0 and offer_side_royalty_split > 0: settlement_ph: bytes32 = ( @@ -783,8 +783,8 @@ async def make_nft1_offer( ) trade_prices.append((uint64(math.floor(amount / offer_side_royalty_split)), settlement_ph)) - required_royalty_info: List[Tuple[bytes32, bytes32, uint16]] = [] # [(launcher_id, address, percentage)] - offered_royalty_percentages: Dict[bytes32, uint16] = {} + required_royalty_info: list[tuple[bytes32, bytes32, uint16]] = [] # [(launcher_id, address, percentage)] + offered_royalty_percentages: dict[bytes32, uint16] = {} for asset, amount in royalty_nft_asset_dict.items(): # royalty enabled NFTs transfer_info = driver_dict[asset].also().also() # type: ignore assert isinstance(transfer_info, PuzzleInfo) @@ -806,10 +806,10 @@ async def make_nft1_offer( else: offered_royalty_percentages[asset] = uint16(royalty_percentage) - royalty_payments: Dict[Optional[bytes32], List[Tuple[bytes32, Payment]]] = {} + royalty_payments: dict[Optional[bytes32], list[tuple[bytes32, Payment]]] = {} for asset, amount in fungible_asset_dict.items(): # offered fungible items if amount < 0 and request_side_royalty_split > 0: - payment_list: List[Tuple[bytes32, Payment]] = [] + payment_list: list[tuple[bytes32, Payment]] = [] for launcher_id, address, percentage in required_royalty_info: extra_royalty_amount = uint64( math.floor(math.floor(abs(amount) / request_side_royalty_split) * (percentage / 10000)) @@ -823,14 +823,14 @@ async def 
make_nft1_offer( p2_ph = await wallet_state_manager.main_wallet.get_puzzle_hash( new=not action_scope.config.tx_config.reuse_puzhash ) - requested_payments: Dict[Optional[bytes32], List[Payment]] = {} + requested_payments: dict[Optional[bytes32], list[Payment]] = {} for asset, amount in offer_dict.items(): if amount > 0: requested_payments[asset] = [Payment(p2_ph, uint64(amount), [p2_ph] if asset is not None else [])] # Find all the coins we're offering - offered_coins_by_asset: Dict[Optional[bytes32], Set[Coin]] = {} - all_offered_coins: Set[Coin] = set() + offered_coins_by_asset: dict[Optional[bytes32], set[Coin]] = {} + all_offered_coins: set[Coin] = set() for asset, amount in offer_dict.items(): if amount < 0: if asset is None: @@ -845,17 +845,17 @@ async def make_nft1_offer( coin_amount_needed: int = abs(amount) + royalty_amount + fee else: coin_amount_needed = abs(amount) + royalty_amount - offered_coins: Set[Coin] = await wallet.get_coins_to_offer(asset, coin_amount_needed, action_scope) + offered_coins: set[Coin] = await wallet.get_coins_to_offer(asset, coin_amount_needed, action_scope) if len(offered_coins) == 0: raise ValueError(f"Did not have asset ID {asset.hex() if asset is not None else 'XCH'} to offer") offered_coins_by_asset[asset] = offered_coins all_offered_coins.update(offered_coins) # Notarize the payments and get the announcements for the bundle - notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = Offer.notarize_payments( + notarized_payments: dict[Optional[bytes32], list[NotarizedPayment]] = Offer.notarize_payments( requested_payments, list(all_offered_coins) ) - announcements_to_assert: List[AssertPuzzleAnnouncement] = Offer.calculate_announcements( + announcements_to_assert: list[AssertPuzzleAnnouncement] = Offer.calculate_announcements( notarized_payments, driver_dict ) for asset, payments in royalty_payments.items(): @@ -877,8 +877,8 @@ async def make_nft1_offer( ) # Create all of the transactions - all_transactions: List[TransactionRecord] = [] - additional_bundles: List[WalletSpendBundle] = [] + all_transactions: list[TransactionRecord] = [] + additional_bundles: list[WalletSpendBundle] = [] # standard pays the fee if possible fee_left_to_pay: uint64 = uint64(0) if None in offer_dict and offer_dict[None] < 0 else fee @@ -949,8 +949,8 @@ async def make_nft1_offer( royalty_coin: Optional[Coin] = None parent_spend: Optional[CoinSpend] = None while True: - duplicate_payments: List[Tuple[bytes32, Payment]] = [] - deduped_payment_list: List[Tuple[bytes32, Payment]] = [] + duplicate_payments: list[tuple[bytes32, Payment]] = [] + deduped_payment_list: list[tuple[bytes32, Payment]] = [] for launcher_id, payment in payments: if payment in [p for _, p in deduped_payment_list]: duplicate_payments.append((launcher_id, payment)) @@ -1050,12 +1050,12 @@ async def make_nft1_offer( async def set_bulk_nft_did( self, - nft_list: List[NFTCoinInfo], + nft_list: list[NFTCoinInfo], did_id: bytes, action_scope: WalletActionScope, fee: uint64 = uint64(0), - announcement_ids: List[bytes32] = [], - extra_conditions: Tuple[Condition, ...] = tuple(), + announcement_ids: list[bytes32] = [], + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> None: self.log.debug("Setting NFT DID with parameters: nft=%s did=%s", nft_list, did_id) nft_ids = [] @@ -1094,11 +1094,11 @@ async def set_bulk_nft_did( async def bulk_transfer_nft( self, - nft_list: List[NFTCoinInfo], + nft_list: list[NFTCoinInfo], puzzle_hash: bytes32, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: self.log.debug("Transfer NFTs %s to %s", nft_list, puzzle_hash.hex()) first = True @@ -1125,7 +1125,7 @@ async def set_nft_did( did_id: bytes, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: self.log.debug("Setting NFT DID with parameters: nft=%s did=%s", nft_coin_info, did_id) unft = UncurriedNFT.uncurry(*nft_coin_info.full_puzzle.uncurry()) @@ -1152,19 +1152,19 @@ async def set_nft_did( async def mint_from_did( self, - metadata_list: List[Dict[str, Any]], + metadata_list: list[dict[str, Any]], action_scope: WalletActionScope, - target_list: Optional[List[bytes32]] = [], + target_list: Optional[list[bytes32]] = [], mint_number_start: Optional[int] = 1, mint_total: Optional[int] = None, - xch_coins: Optional[Set[Coin]] = None, + xch_coins: Optional[set[Coin]] = None, xch_change_ph: Optional[bytes32] = None, new_innerpuzhash: Optional[bytes32] = None, new_p2_puzhash: Optional[bytes32] = None, did_coin: Optional[Coin] = None, did_lineage_parent: Optional[bytes32] = None, fee: Optional[uint64] = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: """ Minting NFTs from the DID linked wallet, also used for bulk minting NFTs. @@ -1246,8 +1246,8 @@ async def mint_from_did( # Empty set to load with the announcements we will assert from DID to # match the announcements from the intermediate launcher puzzle - did_announcements: Set[Any] = set() - puzzle_assertions: Set[Any] = set() + did_announcements: set[Any] = set() + puzzle_assertions: set[Any] = set() amount = uint64(1) intermediate_coin_spends = [] launcher_spends = [] @@ -1368,11 +1368,11 @@ async def mint_from_did( xch_coins_iter = iter(xch_coins) xch_coin = next(xch_coins_iter) - message_list: List[bytes32] = [c.name() for c in xch_coins] + message_list: list[bytes32] = [c.name() for c in xch_coins] message_list.append(Coin(xch_coin.name(), xch_payment.puzzle_hash, xch_payment.amount).name()) message: bytes32 = std_hash(b"".join(message_list)) - xch_extra_conditions: Tuple[Condition, ...] = ( + xch_extra_conditions: tuple[Condition, ...] = ( AssertCoinAnnouncement(asserted_id=did_coin.name(), asserted_msg=message), ) if len(xch_coins) > 1: @@ -1457,15 +1457,15 @@ async def mint_from_did( async def mint_from_xch( self, - metadata_list: List[Dict[str, Any]], + metadata_list: list[dict[str, Any]], action_scope: WalletActionScope, - target_list: Optional[List[bytes32]] = [], + target_list: Optional[list[bytes32]] = [], mint_number_start: Optional[int] = 1, mint_total: Optional[int] = None, - xch_coins: Optional[Set[Coin]] = None, + xch_coins: Optional[set[Coin]] = None, xch_change_ph: Optional[bytes32] = None, fee: Optional[uint64] = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> None: """ Minting NFTs from a single XCH spend using intermediate launcher puzzle @@ -1507,8 +1507,8 @@ async def mint_from_xch( # Empty set to load with the announcements we will assert from XCH to # match the announcements from the intermediate launcher puzzle - coin_announcements: Set[bytes32] = set() - puzzle_assertions: Set[bytes32] = set() + coin_announcements: set[bytes32] = set() + puzzle_assertions: set[bytes32] = set() primaries = [] amount = uint64(1) intermediate_coin_spends = [] @@ -1627,7 +1627,7 @@ async def mint_from_xch( for xch_coin in xch_coins: puzzle: Program = await self.standard_wallet.puzzle_for_puzzle_hash(xch_coin.puzzle_hash) if first: - message_list: List[bytes32] = [c.name() for c in xch_coins] + message_list: list[bytes32] = [c.name() for c in xch_coins] message_list.append(Coin(xch_coin.name(), xch_payment.puzzle_hash, xch_payment.amount).name()) message: bytes32 = std_hash(b"".join(message_list)) @@ -1670,7 +1670,7 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: raise RuntimeError("NFTWallet does not support select_coins()") def require_derivation_paths(self) -> bool: diff --git a/chia/wallet/nft_wallet/ownership_outer_puzzle.py b/chia/wallet/nft_wallet/ownership_outer_puzzle.py index b320e16e30ab..6fe9351c0bac 100644 --- a/chia/wallet/nft_wallet/ownership_outer_puzzle.py +++ b/chia/wallet/nft_wallet/ownership_outer_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Callable, List, Optional, Tuple, Union +from typing import Callable, Optional, Union from clvm_tools.binutils import disassemble @@ -16,7 +16,7 @@ ) -def match_ownership_layer_puzzle(puzzle: UncurriedPuzzle) -> Tuple[bool, List[Program]]: +def match_ownership_layer_puzzle(puzzle: UncurriedPuzzle) -> tuple[bool, list[Program]]: if puzzle.mod == OWNERSHIP_LAYER_MOD: return True, list(puzzle.args.as_iter()) return False, [] diff --git a/chia/wallet/nft_wallet/singleton_outer_puzzle.py b/chia/wallet/nft_wallet/singleton_outer_puzzle.py index 341bdbfb592e..bb3743af5eb7 100644 --- a/chia/wallet/nft_wallet/singleton_outer_puzzle.py +++ b/chia/wallet/nft_wallet/singleton_outer_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Callable, Dict, Optional +from typing import Any, Callable, Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -39,7 +39,7 @@ def match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]: assert launcher_id is not None launcher_ph = launcher_struct[1].atom assert launcher_ph is not None - constructor_dict: Dict[str, Any] = { + constructor_dict: dict[str, Any] = { "type": "singleton", "launcher_id": "0x" + launcher_id.hex(), "launcher_ph": "0x" + launcher_ph.hex(), diff --git a/chia/wallet/nft_wallet/transfer_program_puzzle.py b/chia/wallet/nft_wallet/transfer_program_puzzle.py index 7764a1833381..7d2cc957d107 100644 --- a/chia/wallet/nft_wallet/transfer_program_puzzle.py +++ b/chia/wallet/nft_wallet/transfer_program_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Callable, List, Optional, Tuple +from typing import Callable, Optional from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -17,7 +17,7 @@ ) -def match_transfer_program_puzzle(puzzle: UncurriedPuzzle) -> 
Tuple[bool, List[Program]]: +def match_transfer_program_puzzle(puzzle: UncurriedPuzzle) -> tuple[bool, list[Program]]: if puzzle.mod == TRANSFER_PROGRAM_MOD: return True, list(puzzle.args.as_iter()) return False, [] diff --git a/chia/wallet/nft_wallet/uncurry_nft.py b/chia/wallet/nft_wallet/uncurry_nft.py index facb1ed3caab..33dabe84308f 100644 --- a/chia/wallet/nft_wallet/uncurry_nft.py +++ b/chia/wallet/nft_wallet/uncurry_nft.py @@ -2,7 +2,7 @@ import logging from dataclasses import dataclass -from typing import Optional, Type, TypeVar +from typing import Optional, TypeVar from chia.protocols.wallet_protocol import CoinState from chia.types.blockchain_format.program import Program @@ -91,7 +91,7 @@ class UncurriedNFT(Streamable): trade_price_percentage: Optional[uint16] @classmethod - def uncurry(cls: Type[_T_UncurriedNFT], mod: Program, curried_args: Program) -> Optional[_T_UncurriedNFT]: + def uncurry(cls: type[_T_UncurriedNFT], mod: Program, curried_args: Program) -> Optional[_T_UncurriedNFT]: """ Try to uncurry a NFT puzzle :param cls UncurriedNFT class diff --git a/chia/wallet/notification_manager.py b/chia/wallet/notification_manager.py index 596605bc8253..8be70e04aa0e 100644 --- a/chia/wallet/notification_manager.py +++ b/chia/wallet/notification_manager.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional from chia_rs import G2Element @@ -53,8 +53,8 @@ async def potentially_add_new_notification(self, coin_state: CoinState, parent_s ): return False else: - memos: Dict[bytes32, List[bytes]] = compute_memos_for_spend(parent_spend) - coin_memos: List[bytes] = memos.get(coin_name, []) + memos: dict[bytes32, list[bytes]] = compute_memos_for_spend(parent_spend) + coin_memos: list[bytes] = memos.get(coin_name, []) if len(coin_memos) == 0 or len(coin_memos[0]) != 32: return False wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_puzzle_hash( @@ -87,9 +87,9 @@ async def send_new_notification( amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: - coins: Set[Coin] = await self.wallet_state_manager.main_wallet.select_coins(uint64(amount + fee), action_scope) + coins: set[Coin] = await self.wallet_state_manager.main_wallet.select_coins(uint64(amount + fee), action_scope) origin_coin: bytes32 = next(iter(coins)).name() notification_puzzle: Program = construct_notification(target, amount) notification_hash: bytes32 = notification_puzzle.get_tree_hash() diff --git a/chia/wallet/notification_store.py b/chia/wallet/notification_store.py index bce01742bc33..76c78ff86e3b 100644 --- a/chia/wallet/notification_store.py +++ b/chia/wallet/notification_store.py @@ -3,7 +3,7 @@ import dataclasses import logging import sqlite3 -from typing import List, Optional, Tuple +from typing import Optional from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.db_wrapper import DBWrapper2 @@ -84,7 +84,7 @@ async def add_notification(self, notification: Notification) -> None: ) await cursor.close() - async def get_notifications(self, coin_ids: List[bytes32]) -> List[Notification]: + async def get_notifications(self, coin_ids: list[bytes32]) -> list[Notification]: """ Checks DB for Notification with id: id and returns it. 
""" @@ -111,15 +111,15 @@ async def get_notifications(self, coin_ids: List[bytes32]) -> List[Notification] ] async def get_all_notifications( - self, pagination: Optional[Tuple[Optional[int], Optional[int]]] = None - ) -> List[Notification]: + self, pagination: Optional[tuple[Optional[int], Optional[int]]] = None + ) -> list[Notification]: """ Checks DB for Notification with id: id and returns it. """ if pagination is not None: if pagination[1] is not None and pagination[0] is not None: pagination_str = " LIMIT ?, ?" - pagination_params: Tuple[int, ...] = (pagination[0], pagination[1] - pagination[0]) + pagination_params: tuple[int, ...] = (pagination[0], pagination[1] - pagination[0]) elif pagination[1] is None and pagination[0] is not None: pagination_str = " LIMIT ?, (SELECT COUNT(*) from notifications)" pagination_params = (pagination[0],) @@ -148,7 +148,7 @@ async def get_all_notifications( for row in rows ] - async def delete_notifications(self, coin_ids: List[bytes32]) -> None: + async def delete_notifications(self, coin_ids: list[bytes32]) -> None: coin_ids_str_list = "(" for _ in coin_ids: coin_ids_str_list += "?" diff --git a/chia/wallet/outer_puzzles.py b/chia/wallet/outer_puzzles.py index 5e359ab469cc..bf94bd981a81 100644 --- a/chia/wallet/outer_puzzles.py +++ b/chia/wallet/outer_puzzles.py @@ -1,7 +1,7 @@ from __future__ import annotations from enum import Enum -from typing import Dict, Optional +from typing import Optional from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -74,7 +74,7 @@ def create_asset_id(constructor: PuzzleInfo) -> Optional[bytes32]: function_args = (match_puzzle, construct_puzzle, solve_puzzle, get_inner_puzzle, get_inner_solution) -driver_lookup: Dict[AssetType, DriverProtocol] = { +driver_lookup: dict[AssetType, DriverProtocol] = { AssetType.CAT: CATOuterPuzzle(*function_args), AssetType.SINGLETON: SingletonOuterPuzzle(*function_args), AssetType.METADATA: MetadataOuterPuzzle(*function_args), diff --git a/chia/wallet/payment.py b/chia/wallet/payment.py index d0c6b8ffeaee..4533ab7abc62 100644 --- a/chia/wallet/payment.py +++ b/chia/wallet/payment.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import List from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -13,9 +12,9 @@ class Payment: puzzle_hash: bytes32 amount: uint64 - memos: List[bytes] = field(default_factory=list) + memos: list[bytes] = field(default_factory=list) - def as_condition_args(self) -> List: + def as_condition_args(self) -> list: return [self.puzzle_hash, self.amount, self.memos] def as_condition(self) -> Program: @@ -26,9 +25,9 @@ def name(self) -> bytes32: @classmethod def from_condition(cls, condition: Program) -> Payment: - python_condition: List = condition.as_python() + python_condition: list = condition.as_python() puzzle_hash, amount = python_condition[1:3] - memos: List[bytes] = [] + memos: list[bytes] = [] if len(python_condition) > 3: memos = python_condition[3] return cls(bytes32(puzzle_hash), uint64(int.from_bytes(amount, "big")), memos) diff --git a/chia/wallet/puzzle_drivers.py b/chia/wallet/puzzle_drivers.py index 5aa6817f5ebc..7da80f22083b 100644 --- a/chia/wallet/puzzle_drivers.py +++ b/chia/wallet/puzzle_drivers.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Dict, List, Optional +from typing import Any, 
Optional from clvm.casts import int_from_bytes from clvm.SExp import SExp @@ -25,7 +25,7 @@ class PuzzleInfo: - 'also' gets its own method as it's the supported way to do recursion of PuzzleInfos """ - info: Dict[str, Any] + info: dict[str, Any] def __post_init__(self) -> None: if "type" not in self.info: @@ -59,7 +59,7 @@ def also(self) -> Optional[PuzzleInfo]: else: return None - def check_type(self, types: List[str]) -> bool: + def check_type(self, types: list[str]) -> bool: if types == []: if self.also() is None: return True @@ -78,7 +78,7 @@ def check_type(self, types: List[str]) -> bool: @dataclass(frozen=True) class Solver: - info: Dict[str, Any] + info: dict[str, Any] def __getitem__(self, item: str) -> Any: value = self.info[item] diff --git a/chia/wallet/puzzles/clawback/drivers.py b/chia/wallet/puzzles/clawback/drivers.py index 50464e452250..acf63ef0db7f 100644 --- a/chia/wallet/puzzles/clawback/drivers.py +++ b/chia/wallet/puzzles/clawback/drivers.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, List, Optional, Set, Union +from typing import Any, Optional, Union from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.blockchain_format.coin import Coin @@ -29,11 +29,11 @@ log = logging.getLogger(__name__) -def create_augmented_cond_puzzle(condition: List[Union[int, uint64]], puzzle: Program) -> Program: +def create_augmented_cond_puzzle(condition: list[Union[int, uint64]], puzzle: Program) -> Program: return AUGMENTED_CONDITION.curry(condition, puzzle) -def create_augmented_cond_puzzle_hash(condition: List[Any], puzzle_hash: bytes32) -> bytes32: +def create_augmented_cond_puzzle_hash(condition: list[Any], puzzle_hash: bytes32) -> bytes32: hash_of_quoted_mod_hash = calculate_hash_of_quoted_mod_hash(AUGMENTED_CONDITION_HASH) hashed_args = [Program.to(condition).get_tree_hash(), puzzle_hash] return curry_and_treehash(hash_of_quoted_mod_hash, *hashed_args) @@ -140,7 +140,7 @@ def match_clawback_puzzle( DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM // 8, ) metadata: Optional[ClawbackMetadata] = None - new_puzhash: Set[bytes32] = set() + new_puzhash: set[bytes32] = set() if conditions is not None: for condition in conditions: if ( diff --git a/chia/wallet/puzzles/clawback/puzzle_decorator.py b/chia/wallet/puzzles/clawback/puzzle_decorator.py index de29b7b90a92..359c6351f11a 100644 --- a/chia/wallet/puzzles/clawback/puzzle_decorator.py +++ b/chia/wallet/puzzles/clawback/puzzle_decorator.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Tuple +from typing import Any from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 @@ -21,7 +21,7 @@ class ClawbackPuzzleDecorator: time_lock: uint64 @staticmethod - def create(config: Dict[str, Any]) -> ClawbackPuzzleDecorator: + def create(config: dict[str, Any]) -> ClawbackPuzzleDecorator: self = ClawbackPuzzleDecorator() self.time_lock = uint64(config.get("clawback_timelock", 0)) return self @@ -34,15 +34,15 @@ def decorate_target_puzzle_hash( self, inner_puzzle: Program, target_puzzle_hash: bytes32, - ) -> Tuple[Program, bytes32]: + ) -> tuple[Program, bytes32]: return ( self.decorate(inner_puzzle), create_merkle_puzzle(self.time_lock, inner_puzzle.get_tree_hash(), target_puzzle_hash).get_tree_hash(), ) def solve( - self, inner_puzzle: Program, primaries: List[Payment], inner_solution: Program - ) -> Tuple[Program, Program]: + self, inner_puzzle: Program, primaries: list[Payment], 
inner_solution: Program
+    ) -> tuple[Program, Program]:
         # Append REMARK condition [1, "CLAWBACK", TIME_LOCK, SENDER_PUZHASH, RECIPIENT_PUZHASH]
         if len(primaries) == 1:
             recipient_puzhash = primaries[0].puzzle_hash
@@ -61,7 +61,7 @@ def solve(
         return self.decorate(inner_puzzle), inner_solution
 
     def decorate_memos(
-        self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: List[bytes]
-    ) -> Tuple[Program, List[bytes]]:
+        self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: list[bytes]
+    ) -> tuple[Program, list[bytes]]:
         memos.insert(0, target_puzzle_hash)
         return self.decorate(inner_puzzle), memos
diff --git a/chia/wallet/puzzles/load_clvm.py b/chia/wallet/puzzles/load_clvm.py
index 9f684e5fac2f..310cf2ab02c3 100644
--- a/chia/wallet/puzzles/load_clvm.py
+++ b/chia/wallet/puzzles/load_clvm.py
@@ -6,7 +6,6 @@
 import pathlib
 import sys
 import tempfile
-from typing import List
 
 import importlib_resources
 from clvm_tools_rs import compile_clvm as compile_clvm_rust
@@ -40,7 +39,7 @@ def translate_path(p_):
     compile_clvm_py = compile_clvm_py_candidate
 
 
-def compile_clvm_in_lock(full_path: pathlib.Path, output: pathlib.Path, search_paths: List[pathlib.Path]):
+def compile_clvm_in_lock(full_path: pathlib.Path, output: pathlib.Path, search_paths: list[pathlib.Path]):
     # Compile using rust (default)
 
     # Ensure path translation is done in the idiomatic way currently
@@ -75,7 +74,7 @@ def sha256file(f):
     return res
 
 
-def compile_clvm(full_path: pathlib.Path, output: pathlib.Path, search_paths: List[pathlib.Path] = []):
+def compile_clvm(full_path: pathlib.Path, output: pathlib.Path, search_paths: list[pathlib.Path] = []):
     with Lockfile.create(pathlib.Path(tempfile.gettempdir()) / "clvm_compile" / full_path.name):
         compile_clvm_in_lock(full_path, output, search_paths)
 
diff --git a/chia/wallet/puzzles/puzzle_utils.py b/chia/wallet/puzzles/puzzle_utils.py
index 1de88958dffb..c90bd3649868 100644
--- a/chia/wallet/puzzles/puzzle_utils.py
+++ b/chia/wallet/puzzles/puzzle_utils.py
@@ -1,34 +1,34 @@
 from __future__ import annotations
 
-from typing import Any, List
+from typing import Any
 
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.condition_opcodes import ConditionOpcode
 from chia.util.ints import uint64
 
 
-def make_create_coin_condition(puzzle_hash: bytes32, amount: uint64, memos: List[bytes]) -> List[Any]:
+def make_create_coin_condition(puzzle_hash: bytes32, amount: uint64, memos: list[bytes]) -> list[Any]:
     condition = [ConditionOpcode.CREATE_COIN, puzzle_hash, amount]
     if len(memos) > 0:
         condition.append(memos)
     return condition
 
 
-def make_reserve_fee_condition(fee: uint64) -> List[Any]:
+def make_reserve_fee_condition(fee: uint64) -> list[Any]:
     return [ConditionOpcode.RESERVE_FEE, fee]
 
 
-def make_assert_coin_announcement(announcement_hash: bytes32) -> List[bytes]:
+def make_assert_coin_announcement(announcement_hash: bytes32) -> list[bytes]:
     return [ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, announcement_hash]
 
 
-def make_assert_puzzle_announcement(announcement_hash: bytes32) -> List[bytes]:
+def make_assert_puzzle_announcement(announcement_hash: bytes32) -> list[bytes]:
     return [ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, announcement_hash]
 
 
-def make_create_coin_announcement(message: bytes) -> List[bytes]:
+def make_create_coin_announcement(message: bytes) -> list[bytes]:
     return [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, message]
 
 
-def make_create_puzzle_announcement(message: bytes) -> List[bytes]:
+def make_create_puzzle_announcement(message: bytes) -> list[bytes]:
     return 
[ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, message] diff --git a/chia/wallet/puzzles/singleton_top_layer.py b/chia/wallet/puzzles/singleton_top_layer.py index 596d6fa92284..d1c1752e6190 100644 --- a/chia/wallet/puzzles/singleton_top_layer.py +++ b/chia/wallet/puzzles/singleton_top_layer.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Iterator, List, Optional, Tuple +from collections.abc import Iterator +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -159,7 +160,7 @@ # -def match_singleton_puzzle(puzzle: Program) -> Tuple[bool, Iterator[Program]]: +def match_singleton_puzzle(puzzle: Program) -> tuple[bool, Iterator[Program]]: mod, curried_args = puzzle.uncurry() if mod == SINGLETON_MOD: return True, curried_args.as_iter() @@ -185,8 +186,8 @@ def adapt_inner_puzzle_hash_to_singleton(inner_puzzle_hash: bytes32) -> bytes32: # Take standard coin and amount -> launch conditions & launcher coin solution def launch_conditions_and_coinsol( - coin: Coin, inner_puzzle: Program, comment: List[Tuple[str, str]], amount: uint64 -) -> Tuple[List[Program], CoinSpend]: + coin: Coin, inner_puzzle: Program, comment: list[tuple[str, str]], amount: uint64 +) -> tuple[list[Program], CoinSpend]: if (amount % 2) == 0: raise ValueError("Coin amount cannot be even. Subtract one mojo.") @@ -271,7 +272,7 @@ def claim_p2_singleton( launcher_id: bytes32, delay_time: Optional[uint64] = None, delay_ph: Optional[bytes32] = None, -) -> Tuple[Program, Program, CoinSpend]: +) -> tuple[Program, Program, CoinSpend]: assertion = Program.to([ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, std_hash(p2_singleton_coin.name() + b"$")]) announcement = Program.to([ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, p2_singleton_coin.name()]) if delay_time is None or delay_ph is None: diff --git a/chia/wallet/puzzles/singleton_top_layer_v1_1.py b/chia/wallet/puzzles/singleton_top_layer_v1_1.py index bcbf2409d761..bdf9c845a560 100644 --- a/chia/wallet/puzzles/singleton_top_layer_v1_1.py +++ b/chia/wallet/puzzles/singleton_top_layer_v1_1.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Iterator, List, Optional, Tuple +from collections.abc import Iterator +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -160,7 +161,7 @@ # -def match_singleton_puzzle(puzzle: UncurriedPuzzle) -> Tuple[bool, Iterator[Program]]: +def match_singleton_puzzle(puzzle: UncurriedPuzzle) -> tuple[bool, Iterator[Program]]: if puzzle.mod == SINGLETON_MOD: return True, puzzle.args.as_iter() else: @@ -176,9 +177,9 @@ def generate_launcher_coin(coin: Coin, amount: uint64) -> Coin: def launch_conditions_and_coinsol( coin: Coin, inner_puzzle: Program, - comment: List[Tuple[str, str]], + comment: list[tuple[str, str]], amount: uint64, -) -> Tuple[List[Program], CoinSpend]: +) -> tuple[list[Program], CoinSpend]: if (amount % 2) == 0: raise ValueError("Coin amount cannot be even. 
Subtract one mojo.") @@ -309,7 +310,7 @@ def claim_p2_singleton( launcher_id: bytes32, delay_time: Optional[uint64] = None, delay_ph: Optional[bytes32] = None, -) -> Tuple[Program, Program, CoinSpend]: +) -> tuple[Program, Program, CoinSpend]: assertion = Program.to([ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, std_hash(p2_singleton_coin.name() + b"$")]) announcement = Program.to([ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, p2_singleton_coin.name()]) if delay_time is None or delay_ph is None: diff --git a/chia/wallet/puzzles/tails.py b/chia/wallet/puzzles/tails.py index e2cadd3ec4e5..e335c116dbcf 100644 --- a/chia/wallet/puzzles/tails.py +++ b/chia/wallet/puzzles/tails.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Optional, Tuple +from typing import Any, Optional from chia_rs import Coin @@ -43,20 +43,20 @@ class LimitationsProgram: @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: raise NotImplementedError("Need to implement 'match' on limitations programs") @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: raise NotImplementedError("Need to implement 'construct' on limitations programs") @staticmethod - def solve(args: List[Program], solution_dict: Dict) -> Program: + def solve(args: list[Program], solution_dict: dict) -> Program: raise NotImplementedError("Need to implement 'solve' on limitations programs") @classmethod async def generate_issuance_bundle( - cls, wallet, cat_tail_info: Dict, amount: uint64, action_scope: WalletActionScope + cls, wallet, cat_tail_info: dict, amount: uint64, action_scope: WalletActionScope ) -> WalletSpendBundle: raise NotImplementedError("Need to implement 'generate_issuance_bundle' on limitations programs") @@ -68,7 +68,7 @@ class GenesisById(LimitationsProgram): """ @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: if uncurried_mod == GENESIS_BY_ID_MOD: genesis_id = curried_args.first() return True, [genesis_id] @@ -76,18 +76,18 @@ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Pro return False, [] @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: return GENESIS_BY_ID_MOD.curry(args[0]) @staticmethod - def solve(args: List[Program], solution_dict: Dict) -> Program: + def solve(args: list[Program], solution_dict: dict) -> Program: return Program.to([]) @classmethod async def generate_issuance_bundle( cls, wallet, - _: Dict, + _: dict, amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), @@ -153,7 +153,7 @@ class GenesisByPuzhash(LimitationsProgram): """ @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: if uncurried_mod == GENESIS_BY_PUZHASH_MOD: genesis_puzhash = curried_args.first() return True, [genesis_puzhash] @@ -161,11 +161,11 @@ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Pro return False, [] @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: return GENESIS_BY_PUZHASH_MOD.curry(args[0]) @staticmethod - def solve(args: 
List[Program], solution_dict: Dict) -> Program: + def solve(args: list[Program], solution_dict: dict) -> Program: pid = hexstr_to_bytes(solution_dict["parent_coin_info"]) return Program.to([pid, solution_dict["amount"]]) @@ -176,7 +176,7 @@ class EverythingWithSig(LimitationsProgram): """ @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: if uncurried_mod == EVERYTHING_WITH_SIG_MOD: pubkey = curried_args.first() return True, [pubkey] @@ -184,11 +184,11 @@ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Pro return False, [] @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: return EVERYTHING_WITH_SIG_MOD.curry(args[0]) @staticmethod - def solve(args: List[Program], solution_dict: Dict) -> Program: + def solve(args: list[Program], solution_dict: dict) -> Program: return Program.to([]) @@ -198,7 +198,7 @@ class DelegatedLimitations(LimitationsProgram): """ @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: if uncurried_mod == DELEGATED_LIMITATIONS_MOD: pubkey = curried_args.first() return True, [pubkey] @@ -206,11 +206,11 @@ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Pro return False, [] @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: return DELEGATED_LIMITATIONS_MOD.curry(args[0]) @staticmethod - def solve(args: List[Program], solution_dict: Dict) -> Program: + def solve(args: list[Program], solution_dict: dict) -> Program: signed_program = ALL_LIMITATIONS_PROGRAMS[solution_dict["signed_program"]["identifier"]] inner_program_args = [Program.fromhex(item) for item in solution_dict["signed_program"]["args"]] inner_solution_dict = solution_dict["program_arguments"] @@ -228,7 +228,7 @@ class GenesisByIdOrSingleton(LimitationsProgram): """ @staticmethod - def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Program]]: # pragma: no cover + def match(uncurried_mod: Program, curried_args: Program) -> tuple[bool, list[Program]]: # pragma: no cover if uncurried_mod == GENESIS_BY_ID_OR_SINGLETON_MOD: genesis_id = curried_args.first() return True, [genesis_id] @@ -236,14 +236,14 @@ def match(uncurried_mod: Program, curried_args: Program) -> Tuple[bool, List[Pro return False, [] @staticmethod - def construct(args: List[Program]) -> Program: + def construct(args: list[Program]) -> Program: return GENESIS_BY_ID_OR_SINGLETON_MOD.curry( args[0], args[1], ) @staticmethod - def solve(args: List[Program], solution_dict: Dict) -> Program: # pragma: no cover + def solve(args: list[Program], solution_dict: dict) -> Program: # pragma: no cover pid = hexstr_to_bytes(solution_dict["parent_coin_info"]) return Program.to([pid, solution_dict["amount"]]) @@ -251,13 +251,13 @@ def solve(args: List[Program], solution_dict: Dict) -> Program: # pragma: no co async def generate_issuance_bundle( cls, wallet, - tail_info: Dict, + tail_info: dict, amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), ) -> WalletSpendBundle: if "coins" in tail_info: - coins: List[Coin] = tail_info["coins"] + coins: list[Coin] = tail_info["coins"] origin_id = coins.copy().pop().name() else: # pragma: no cover coins = await 
wallet.standard_wallet.select_coins(amount + fee, action_scope) @@ -326,7 +326,7 @@ async def generate_issuance_bundle( # This should probably be much more elegant than just a dictionary with strings as identifiers # Right now this is small and experimental so it can stay like this -ALL_LIMITATIONS_PROGRAMS: Dict[str, Any] = { +ALL_LIMITATIONS_PROGRAMS: dict[str, Any] = { "genesis_by_id": GenesisById, "genesis_by_puzhash": GenesisByPuzhash, "everything_with_signature": EverythingWithSig, @@ -335,7 +335,7 @@ async def generate_issuance_bundle( } -def match_limitations_program(limitations_program: Program) -> Tuple[Optional[LimitationsProgram], List[Program]]: +def match_limitations_program(limitations_program: Program) -> tuple[Optional[LimitationsProgram], list[Program]]: uncurried_mod, curried_args = limitations_program.uncurry() for key, lp in ALL_LIMITATIONS_PROGRAMS.items(): matched, args = lp.match(uncurried_mod, curried_args) diff --git a/chia/wallet/signer_protocol.py b/chia/wallet/signer_protocol.py index bd6800735359..18670447bb4d 100644 --- a/chia/wallet/signer_protocol.py +++ b/chia/wallet/signer_protocol.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.types.blockchain_format.coin import Coin as _Coin from chia.types.blockchain_format.program import Program @@ -58,7 +57,7 @@ def as_coin_spend(self) -> CoinSpend: @clvm_streamable @dataclass(frozen=True) class TransactionInfo(Streamable): - spends: List[Spend] + spends: list[Spend] @clvm_streamable @@ -72,7 +71,7 @@ class SigningTarget(Streamable): @clvm_streamable @dataclass(frozen=True) class SumHint(Streamable): - fingerprints: List[bytes] + fingerprints: list[bytes] synthetic_offset: bytes final_pubkey: bytes @@ -81,21 +80,21 @@ class SumHint(Streamable): @dataclass(frozen=True) class PathHint(Streamable): root_fingerprint: bytes - path: List[uint64] + path: list[uint64] @clvm_streamable @dataclass(frozen=True) class KeyHints(Streamable): - sum_hints: List[SumHint] - path_hints: List[PathHint] + sum_hints: list[SumHint] + path_hints: list[PathHint] @clvm_streamable @dataclass(frozen=True) class SigningInstructions(Streamable): key_hints: KeyHints - targets: List[SigningTarget] + targets: list[SigningTarget] @clvm_streamable @@ -123,4 +122,4 @@ class Signature(Streamable): @dataclass(frozen=True) class SignedTransaction(Streamable): transaction_info: TransactionInfo - signatures: List[Signature] + signatures: list[Signature] diff --git a/chia/wallet/singleton.py b/chia/wallet/singleton.py index 916cc888545f..9ad8d9787adf 100644 --- a/chia/wallet/singleton.py +++ b/chia/wallet/singleton.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Union +from typing import Optional, Union from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -94,7 +94,7 @@ def create_singleton_puzzle(innerpuz: Union[Program, SerializedProgram], launche def get_most_recent_singleton_coin_from_coin_spend(coin_sol: CoinSpend) -> Optional[Coin]: - additions: List[Coin] = compute_additions(coin_sol) + additions: list[Coin] = compute_additions(coin_sol) for coin in additions: if coin.amount % 2 == 1: return coin diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index 48b081ef1100..b29c89a6ca99 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -4,7 +4,7 @@ import logging import time from collections import deque -from typing import 
TYPE_CHECKING, Any, Deque, Dict, List, Optional, Set, Tuple, Union
+from typing import TYPE_CHECKING, Any, Deque, Optional, Union
 
 from typing_extensions import Literal
 
@@ -97,7 +97,7 @@ class TradeManager:
     wallet_state_manager: WalletStateManager
     log: logging.Logger
     trade_store: TradeStore
-    most_recently_deserialized_trade: Optional[Tuple[bytes32, Offer]]
+    most_recently_deserialized_trade: Optional[tuple[bytes32, Offer]]
 
     @staticmethod
     async def create(
@@ -116,13 +116,13 @@ async def create(
         self.most_recently_deserialized_trade = None
         return self
 
-    async def get_offers_with_status(self, status: TradeStatus) -> List[TradeRecord]:
+    async def get_offers_with_status(self, status: TradeStatus) -> list[TradeRecord]:
         records = await self.trade_store.get_trade_record_with_status(status)
         return records
 
     async def get_coins_of_interest(
         self,
-    ) -> Set[bytes32]:
+    ) -> set[bytes32]:
         """
         Returns a list of coins we want to check if they are included in the filter.
         These will include coins that belong to us and coins that are on the other side of the trade
@@ -171,11 +171,11 @@ async def coins_of_interest_farmed(
         result = await self.wallet_state_manager.coin_store.get_coin_records(
             coin_id_filter=HashFilter.include(primary_coin_ids)
         )
-        our_primary_coins: List[Coin] = [cr.coin for cr in result.records]
-        our_additions: List[Coin] = list(
+        our_primary_coins: list[Coin] = [cr.coin for cr in result.records]
+        our_additions: list[Coin] = list(
             filter(lambda c: offer.get_root_removal(c) in our_primary_coins, offer.additions())
         )
-        our_addition_ids: List[bytes32] = [c.name() for c in our_additions]
+        our_addition_ids: list[bytes32] = [c.name() for c in our_additions]
 
         # And get all relevant coin states
         coin_states = await self.wallet_state_manager.wallet_node.get_coin_state(
@@ -184,13 +184,13 @@ async def coins_of_interest_farmed(
             fork_height=fork_height,
         )
         assert coin_states is not None
-        coin_state_names: List[bytes32] = [cs.coin.name() for cs in coin_states]
+        coin_state_names: list[bytes32] = [cs.coin.name() for cs in coin_states]
         # If any of our settlement_payments were spent, this offer was a success! 
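         # `our_addition_ids` are the ids of the coins this wallet expects the offer
         # to create for it, and `coin_state_names` are the coin ids the full node
         # actually reported; only an exact set match marks the trade CONFIRMED below.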
if set(our_addition_ids) == set(coin_state_names): height = coin_state.spent_height assert height is not None await self.trade_store.set_status(trade.trade_id, TradeStatus.CONFIRMED, index=height) - tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(offer, False) + tx_records: list[TransactionRecord] = await self.calculate_tx_records_for_offer(offer, False) for tx in tx_records: if TradeStatus(trade.status) == TradeStatus.PENDING_ACCEPT: await self.wallet_state_manager.add_transaction( @@ -208,7 +208,7 @@ async def coins_of_interest_farmed( await self.trade_store.set_status(trade.trade_id, TradeStatus.FAILED) self.log.warning(f"Trade with id: {trade.trade_id} failed") - async def get_locked_coins(self) -> Dict[bytes32, WalletCoinRecord]: + async def get_locked_coins(self) -> dict[bytes32, WalletCoinRecord]: """Returns a dictionary of confirmed coins that are locked by a trade.""" all_pending = [] pending_accept = await self.get_offers_with_status(TradeStatus.PENDING_ACCEPT) @@ -230,8 +230,8 @@ async def get_locked_coins(self) -> Dict[bytes32, WalletCoinRecord]: ) ).coin_id_to_record - async def get_all_trades(self) -> List[TradeRecord]: - all: List[TradeRecord] = await self.trade_store.get_all_trades() + async def get_all_trades(self) -> list[TradeRecord]: + all: list[TradeRecord] = await self.trade_store.get_all_trades() return all async def get_trade_by_id(self, trade_id: bytes32) -> Optional[TradeRecord]: @@ -244,19 +244,19 @@ async def fail_pending_offer(self, trade_id: bytes32) -> None: async def cancel_pending_offers( self, - trades: List[bytes32], + trades: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), secure: bool = True, # Cancel with a transaction on chain - trade_cache: Dict[bytes32, TradeRecord] = {}, # Optional pre-fetched trade records for optimization - extra_conditions: Tuple[Condition, ...] = tuple(), + trade_cache: dict[bytes32, TradeRecord] = {}, # Optional pre-fetched trade records for optimization + extra_conditions: tuple[Condition, ...] 
= tuple(),
     ) -> None:
         """This will create a transaction that includes coins that were offered"""
 
         # Need to do some pre-figuring of announcements that will need to be made
         announcement_nonce: bytes32 = std_hash(b"".join(trades))
-        trade_records: List[TradeRecord] = []
-        all_cancellation_coins: List[List[Coin]] = []
+        trade_records: list[TradeRecord] = []
+        all_cancellation_coins: list[list[Coin]] = []
         announcement_creations: Deque[CreateCoinAnnouncement] = deque()
         announcement_assertions: Deque[AssertCoinAnnouncement] = deque()
         for trade_id in trades:
@@ -282,7 +282,7 @@ async def cancel_pending_offers(
         # Make every coin assert the announcement from the one before them
         announcement_assertions.rotate(1)
 
-        all_txs: List[TransactionRecord] = []
+        all_txs: list[TransactionRecord] = []
         fee_to_pay: uint64 = fee
         for trade, cancellation_coins in zip(trade_records, all_cancellation_coins):
             self.log.info(f"Secure-Cancel pending offer with id trade_id {trade.trade_id.hex()}")
@@ -292,7 +292,7 @@ async def cancel_pending_offers(
                 await self.trade_store.set_status(trade.trade_id, TradeStatus.CANCELLED)
                 continue
 
-            cancellation_additions: List[Coin] = []
+            cancellation_additions: list[Coin] = []
             valid_times: ConditionValidTimes = parse_timelock_info(extra_conditions)
             for coin in cancellation_coins:
                 wallet = await self.wallet_state_manager.get_wallet_for_coin(coin.name())
@@ -306,7 +306,7 @@ async def cancel_pending_offers(
                 )
 
                 if len(trade_records) > 1 or len(cancellation_coins) > 1:
-                    announcement_conditions: Tuple[Condition, ...] = (
+                    announcement_conditions: tuple[Condition, ...] = (
                         announcement_creations.popleft(),
                         announcement_assertions.popleft(),
                     )
@@ -318,7 +318,7 @@ async def cancel_pending_offers(
                     if wallet.type() == WalletType.STANDARD_WALLET:
                         assert isinstance(wallet, Wallet)
                         if fee_to_pay > coin.amount:
-                            selected_coins: Set[Coin] = await wallet.select_coins(
+                            selected_coins: set[Coin] = await wallet.select_coins(
                                 uint64(fee_to_pay - coin.amount),
                                 action_scope,
                             )
@@ -415,9 +415,9 @@ async def save_trade(self, trade: TradeRecord, offer: Offer) -> None:
         await self.trade_store.add_trade_record(trade, offer_name)
 
         # We want to subscribe to the coin IDs of all coins that are not the ephemeral offer coins
-        offered_coins: Set[Coin] = {value for values in offer.get_offered_coins().values() for value in values}
-        non_offer_additions: Set[Coin] = set(offer.additions()) ^ offered_coins
-        non_offer_removals: Set[Coin] = set(offer.removals()) ^ offered_coins
+        offered_coins: set[Coin] = {value for values in offer.get_offered_coins().values() for value in values}
+        non_offer_additions: set[Coin] = set(offer.additions()) ^ offered_coins
+        non_offer_removals: set[Coin] = set(offer.removals()) ^ offered_coins
         await self.wallet_state_manager.add_interested_coin_ids(
             [coin.name() for coin in (*non_offer_removals, *non_offer_additions)]
         )
@@ -426,15 +426,15 @@ async def save_trade(self, trade: TradeRecord, offer: Offer) -> None:
 
     async def create_offer_for_ids(
         self,
-        offer: Dict[Union[int, bytes32], int],
+        offer: dict[Union[int, bytes32], int],
         action_scope: WalletActionScope,
-        driver_dict: Optional[Dict[bytes32, PuzzleInfo]] = None,
+        driver_dict: Optional[dict[bytes32, PuzzleInfo]] = None,
         solver: Optional[Solver] = None,
         fee: uint64 = uint64(0),
         validate_only: bool = False,
-        extra_conditions: Tuple[Condition, ...] = tuple(),
+        extra_conditions: tuple[Condition, ...] 
= tuple(), taking: bool = False, - ) -> Union[Tuple[Literal[True], TradeRecord, None], Tuple[Literal[False], None, str]]: + ) -> Union[tuple[Literal[True], TradeRecord, None], tuple[Literal[False], None, str]]: if driver_dict is None: driver_dict = {} if solver is None: @@ -476,14 +476,14 @@ async def create_offer_for_ids( async def _create_offer_for_ids( self, - offer_dict: Dict[Union[int, bytes32], int], + offer_dict: dict[Union[int, bytes32], int], action_scope: WalletActionScope, - driver_dict: Optional[Dict[bytes32, PuzzleInfo]] = None, + driver_dict: Optional[dict[bytes32, PuzzleInfo]] = None, solver: Optional[Solver] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), taking: bool = False, - ) -> Union[Tuple[Literal[True], Offer, None], Tuple[Literal[False], None, str]]: + ) -> Union[tuple[Literal[True], Offer, None], tuple[Literal[False], None, str]]: """ Offer is dictionary of wallet ids and amount """ @@ -492,16 +492,16 @@ async def _create_offer_for_ids( if solver is None: solver = Solver({}) try: - coins_to_offer: Dict[Union[int, bytes32], Set[Coin]] = {} - requested_payments: Dict[Optional[bytes32], List[Payment]] = {} - offer_dict_no_ints: Dict[Optional[bytes32], int] = {} + coins_to_offer: dict[Union[int, bytes32], set[Coin]] = {} + requested_payments: dict[Optional[bytes32], list[Payment]] = {} + offer_dict_no_ints: dict[Optional[bytes32], int] = {} for id, amount in offer_dict.items(): asset_id: Optional[bytes32] = None # asset_id can either be none if asset is XCH or # bytes32 if another asset (e.g. NFT, CAT) if amount > 0: # this is what we are receiving in the trade - memos: List[bytes] = [] + memos: list[bytes] = [] if isinstance(id, int): wallet_id = uint32(id) wallet = self.wallet_state_manager.wallets.get(wallet_id) @@ -601,13 +601,13 @@ async def _create_offer_for_ids( if potential_special_offer is not None: return True, potential_special_offer, None - all_coins: List[Coin] = [c for coins in coins_to_offer.values() for c in coins] - notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = Offer.notarize_payments( + all_coins: list[Coin] = [c for coins in coins_to_offer.values() for c in coins] + notarized_payments: dict[Optional[bytes32], list[NotarizedPayment]] = Offer.notarize_payments( requested_payments, all_coins ) announcements_to_assert = Offer.calculate_announcements(notarized_payments, driver_dict) - all_transactions: List[TransactionRecord] = [] + all_transactions: list[TransactionRecord] = [] fee_left_to_pay: uint64 = fee # The access of the sorted keys here makes sure we create the XCH transaction first to make sure we pay fee # with the XCH side of the offer and don't create an extra fee transaction in other wallets. 
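The "sorted keys" access described in the comment above is subtle, so a minimal
standalone sketch may help (hypothetical helper name, not code from this patch):
the offer dict's asset ids are Optional values, and sorting on "is not None"
guarantees the XCH entry, keyed by None, is processed first.

from typing import Optional

def asset_ids_xch_first(asset_ids: list[Optional[bytes]]) -> list[Optional[bytes]]:
    # False sorts before True, so the None key (XCH) precedes every 32-byte
    # asset id; real asset ids then sort lexicographically among themselves.
    return sorted(asset_ids, key=lambda a: (a is not None, a if a is not None else b""))

# XCH comes out first, so its transaction is created first and the fee is paid
# there instead of forcing an extra fee transaction in a CAT or NFT wallet.
assert asset_ids_xch_first([b"\x01" * 32, None]) == [None, b"\x01" * 32]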
@@ -689,9 +689,9 @@ async def maybe_create_wallets_for_offer(self, offer: Offer) -> None: await wsm.create_wallet_for_puzzle_info(offer.driver_dict[key]) async def check_offer_validity(self, offer: Offer, peer: WSChiaConnection) -> bool: - all_removals: List[Coin] = offer.removals() - all_removal_names: List[bytes32] = [c.name() for c in all_removals] - non_ephemeral_removals: List[Coin] = list( + all_removals: list[Coin] = offer.removals() + all_removal_names: list[bytes32] = [c.name() for c in all_removals] + non_ephemeral_removals: list[Coin] = list( filter(lambda c: c.parent_coin_info not in all_removal_names, all_removals) ) coin_states = await self.wallet_state_manager.wallet_node.get_coin_state( @@ -700,27 +700,27 @@ async def check_offer_validity(self, offer: Offer, peer: WSChiaConnection) -> bo return len(coin_states) == len(non_ephemeral_removals) and all([cs.spent_height is None for cs in coin_states]) - async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> List[TransactionRecord]: + async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> list[TransactionRecord]: if validate: final_spend_bundle: WalletSpendBundle = offer.to_valid_spend() - hint_dict: Dict[bytes32, bytes32] = {} - additions_dict: Dict[bytes32, Coin] = {} + hint_dict: dict[bytes32, bytes32] = {} + additions_dict: dict[bytes32, Coin] = {} for hinted_coins, _ in ( compute_spend_hints_and_additions(spend) for spend in final_spend_bundle.coin_spends ): hint_dict.update({id: hc.hint for id, hc in hinted_coins.items() if hc.hint is not None}) additions_dict.update({id: hc.coin for id, hc in hinted_coins.items()}) - all_additions: List[Coin] = list(a for a in additions_dict.values()) + all_additions: list[Coin] = list(a for a in additions_dict.values()) else: final_spend_bundle = offer._bundle hint_dict = offer.hints() all_additions = offer.additions() - settlement_coins: List[Coin] = [c for coins in offer.get_offered_coins().values() for c in coins] - settlement_coin_ids: List[bytes32] = [c.name() for c in settlement_coins] + settlement_coins: list[Coin] = [c for coins in offer.get_offered_coins().values() for c in coins] + settlement_coin_ids: list[bytes32] = [c.name() for c in settlement_coins] - removals: List[Coin] = final_spend_bundle.removals() - additions: List[Coin] = list(a for a in all_additions if a not in removals) + removals: list[Coin] = final_spend_bundle.removals() + additions: list[Coin] = list(a for a in all_additions if a not in removals) valid_times: ConditionValidTimes = parse_timelock_info( parse_conditions_non_consensus( condition @@ -733,7 +733,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> txs = [] - addition_dict: Dict[uint32, List[Coin]] = {} + addition_dict: dict[uint32, list[Coin]] = {} for addition in additions: wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_coin( addition, @@ -770,7 +770,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> addition_dict[wallet_identifier.id].append(addition) # While we want additions to show up as separate records, removals of the same wallet should show as one - removal_dict: Dict[uint32, List[Coin]] = {} + removal_dict: dict[uint32, list[Coin]] = {} for removal in removals: wallet_identifier = await self.wallet_state_manager.get_wallet_identifier_for_coin( removal, @@ -780,7 +780,7 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> 
removal_dict.setdefault(wallet_identifier.id, []) removal_dict[wallet_identifier.id].append(removal) - all_removals: List[bytes32] = [r.name() for removals in removal_dict.values() for r in removals] + all_removals: list[bytes32] = [r.name() for removals in removal_dict.values() for r in removals] for wid, grouped_removals in removal_dict.items(): wallet = self.wallet_state_manager.wallets[wid] @@ -788,12 +788,12 @@ async def calculate_tx_records_for_offer(self, offer: Offer, validate: bool) -> removal_tree_hash = Program.to([coin_as_list(rem) for rem in grouped_removals]).get_tree_hash() # We also need to calculate the sent amount removed: int = sum(c.amount for c in grouped_removals) - removed_ids: List[bytes32] = [c.name() for c in grouped_removals] - all_additions_from_grouped_removals: List[Coin] = [ + removed_ids: list[bytes32] = [c.name() for c in grouped_removals] + all_additions_from_grouped_removals: list[Coin] = [ c for c in all_additions if c.parent_coin_info in removed_ids ] - potential_change_coins: List[Coin] = addition_dict[wid] if wid in addition_dict else [] - change_coins: List[Coin] = [c for c in potential_change_coins if c.parent_coin_info in all_removals] + potential_change_coins: list[Coin] = addition_dict[wid] if wid in addition_dict else [] + change_coins: list[Coin] = [c for c in potential_change_coins if c.parent_coin_info in all_removals] change_amount: int = sum(c.amount for c in change_coins) sent_amount: int = ( removed @@ -835,12 +835,12 @@ async def respond_to_offer( action_scope: WalletActionScope, solver: Optional[Solver] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> TradeRecord: if solver is None: solver = Solver({}) - take_offer_dict: Dict[Union[bytes32, int], int] = {} - arbitrage: Dict[Optional[bytes32], int] = offer.arbitrage() + take_offer_dict: dict[Union[bytes32, int], int] = {} + arbitrage: dict[Optional[bytes32], int] = offer.arbitrage() for asset_id, amount in arbitrage.items(): if asset_id is None: @@ -890,7 +890,7 @@ async def respond_to_offer( ) await self.maybe_create_wallets_for_offer(complete_offer) - tx_records: List[TransactionRecord] = await self.calculate_tx_records_for_offer(complete_offer, True) + tx_records: list[TransactionRecord] = await self.calculate_tx_records_for_offer(complete_offer, True) trade_record: TradeRecord = TradeRecord( confirmed_at_index=uint32(0), @@ -917,12 +917,12 @@ async def respond_to_offer( async def check_for_special_offer_making( self, - offer_dict: Dict[Optional[bytes32], int], - driver_dict: Dict[bytes32, PuzzleInfo], + offer_dict: dict[Optional[bytes32], int], + driver_dict: dict[bytes32, PuzzleInfo], action_scope: WalletActionScope, solver: Solver, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> Optional[Offer]: for puzzle_info in driver_dict.values(): if ( @@ -974,7 +974,7 @@ def check_for_owner_change_in_drivers(self, puzzle_info: PuzzleInfo, driver_info return True return False - async def get_offer_summary(self, offer: Offer) -> Dict[str, Any]: + async def get_offer_summary(self, offer: Offer) -> dict[str, Any]: for puzzle_info in offer.driver_dict.values(): if ( puzzle_info.check_type( @@ -1010,7 +1010,7 @@ async def get_offer_summary(self, offer: Offer) -> Dict[str, Any]: async def check_for_final_modifications( self, offer: Offer, solver: Solver, action_scope: WalletActionScope - ) -> Tuple[Offer, Solver]: + ) -> tuple[Offer, Solver]: for puzzle_info in offer.driver_dict.values(): if ( puzzle_info.check_type( @@ -1040,10 +1040,10 @@ async def check_for_final_modifications( async def check_for_requested_payment_modifications( self, - requested_payments: Dict[Optional[bytes32], List[Payment]], - driver_dict: Dict[bytes32, PuzzleInfo], + requested_payments: dict[Optional[bytes32], list[Payment]], + driver_dict: dict[bytes32, PuzzleInfo], taking: bool, - ) -> Dict[Optional[bytes32], List[Payment]]: + ) -> dict[Optional[bytes32], list[Payment]]: # This function exclusively deals with CR-CATs for now if not taking: for asset_id, puzzle_info in driver_dict.items(): diff --git a/chia/wallet/trade_record.py b/chia/wallet/trade_record.py index 3b8a91d5d9c0..0c2616ccff9a 100644 --- a/chia/wallet/trade_record.py +++ b/chia/wallet/trade_record.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Dict, List, Optional, Tuple, Type, TypeVar +from typing import Any, Optional, TypeVar from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -28,12 +28,12 @@ class TradeRecordOld(Streamable): sent: uint32 offer: bytes taken_offer: Optional[bytes] - coins_of_interest: List[Coin] + coins_of_interest: list[Coin] trade_id: bytes32 status: uint32 # TradeStatus, enum not streamable - sent_to: List[Tuple[str, uint8, Optional[str]]] # MempoolSubmissionStatus.status enum not streamable + sent_to: list[tuple[str, uint8, Optional[str]]] # MempoolSubmissionStatus.status enum not streamable - def to_json_dict_convenience(self) -> Dict[str, Any]: + def to_json_dict_convenience(self) -> dict[str, Any]: formatted = self.to_json_dict() formatted["status"] = TradeStatus(self.status).name offer_to_summarize: bytes = self.offer if self.taken_offer is None else self.taken_offer @@ -51,7 +51,7 @@ def to_json_dict_convenience(self) -> Dict[str, Any]: @classmethod def from_json_dict_convenience( - cls: Type[_T_TradeRecord], record: Dict[str, Any], offer: str = "" + cls: type[_T_TradeRecord], record: dict[str, Any], offer: str = "" ) -> _T_TradeRecord: new_record = record.copy() new_record["status"] = TradeStatus[record["status"]].value diff --git a/chia/wallet/trading/offer.py b/chia/wallet/trading/offer.py index b0ed6c9e6c64..b8b7d5d816f9 100644 --- a/chia/wallet/trading/offer.py +++ b/chia/wallet/trading/offer.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Any, BinaryIO, Dict, List, Optional, Set, Tuple, Union +from typing import Any, BinaryIO, Optional, Union from chia_rs import G2Element from clvm_tools.binutils import disassemble @@ -49,8 +49,8 @@ def detect_dependent_coin( - names: List[bytes32], deps: Dict[bytes32, List[bytes32]], announcement_dict: Dict[bytes32, List[bytes32]] -) -> Optional[Tuple[bytes32, 
bytes32]]: + names: list[bytes32], deps: dict[bytes32, list[bytes32]], announcement_dict: dict[bytes32, list[bytes32]] +) -> Optional[tuple[bytes32, bytes32]]: # First, we check for any dependencies on coins in the same bundle for name in names: for dependency in deps[name]: @@ -75,19 +75,19 @@ def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> Notariz @dataclass(frozen=True, eq=False) class Offer: - requested_payments: Dict[ - Optional[bytes32], List[NotarizedPayment] + requested_payments: dict[ + Optional[bytes32], list[NotarizedPayment] ] # The key is the asset id of the asset being requested _bundle: WalletSpendBundle - driver_dict: Dict[bytes32, PuzzleInfo] # asset_id -> asset driver + driver_dict: dict[bytes32, PuzzleInfo] # asset_id -> asset driver # this is a cache of the coin additions made by the SpendBundle (_bundle) # ordered by the coin being spent - _additions: Dict[Coin, List[Coin]] = field(init=False, repr=False) - _hints: Dict[bytes32, bytes32] = field(init=False) - _offered_coins: Dict[Optional[bytes32], List[Coin]] = field(init=False, repr=False) + _additions: dict[Coin, list[Coin]] = field(init=False, repr=False) + _hints: dict[bytes32, bytes32] = field(init=False) + _offered_coins: dict[Optional[bytes32], list[Coin]] = field(init=False, repr=False) _final_spend_bundle: Optional[WalletSpendBundle] = field(init=False, repr=False) - _conditions: Optional[Dict[Coin, List[Condition]]] = field(init=False) + _conditions: Optional[dict[Coin, list[Condition]]] = field(init=False) @staticmethod def ph() -> bytes32: @@ -95,15 +95,15 @@ def ph() -> bytes32: @staticmethod def notarize_payments( - requested_payments: Dict[Optional[bytes32], List[Payment]], # `None` means you are requesting XCH - coins: List[Coin], - ) -> Dict[Optional[bytes32], List[NotarizedPayment]]: + requested_payments: dict[Optional[bytes32], list[Payment]], # `None` means you are requesting XCH + coins: list[Coin], + ) -> dict[Optional[bytes32], list[NotarizedPayment]]: # This sort should be reproducible in CLVM with `>s` - sorted_coins: List[Coin] = sorted(coins, key=Coin.name) - sorted_coin_list: List[List[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins] + sorted_coins: list[Coin] = sorted(coins, key=Coin.name) + sorted_coin_list: list[list[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins] nonce: bytes32 = Program.to(sorted_coin_list).get_tree_hash() - notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {} + notarized_payments: dict[Optional[bytes32], list[NotarizedPayment]] = {} for asset_id, payments in requested_payments.items(): notarized_payments[asset_id] = [] for p in payments: @@ -115,10 +115,10 @@ def notarize_payments( # The announcements returned from this function must be asserted in whatever spend bundle is created by the wallet @staticmethod def calculate_announcements( - notarized_payments: Dict[Optional[bytes32], List[NotarizedPayment]], - driver_dict: Dict[bytes32, PuzzleInfo], - ) -> List[AssertPuzzleAnnouncement]: - announcements: List[AssertPuzzleAnnouncement] = [] + notarized_payments: dict[Optional[bytes32], list[NotarizedPayment]], + driver_dict: dict[bytes32, PuzzleInfo], + ) -> list[AssertPuzzleAnnouncement]: + announcements: list[AssertPuzzleAnnouncement] = [] for asset_id, payments in notarized_payments.items(): if asset_id is not None: if asset_id not in driver_dict: @@ -135,7 +135,7 @@ def calculate_announcements( def __post_init__(self) -> None: # Verify that there are no duplicate payments for payments 
in self.requested_payments.values(): - payment_programs: List[bytes32] = [p.name() for p in payments] + payment_programs: list[bytes32] = [p.name() for p in payments] if len(set(payment_programs)) != len(payment_programs): raise ValueError("Bundle has duplicate requested payments") @@ -145,8 +145,8 @@ def __post_init__(self) -> None: raise ValueError("Offer does not have enough driver information about the requested payments") # populate the _additions cache - adds: Dict[Coin, List[Coin]] = {} - hints: Dict[bytes32, bytes32] = {} + adds: dict[Coin, list[Coin]] = {} + hints: dict[bytes32, bytes32] = {} max_cost = int(DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) for cs in self._bundle.coin_spends: # you can't spend the same coin twice in the same SpendBundle @@ -164,9 +164,9 @@ def __post_init__(self) -> None: object.__setattr__(self, "_hints", hints) object.__setattr__(self, "_conditions", None) - def conditions(self) -> Dict[Coin, List[Condition]]: + def conditions(self) -> dict[Coin, list[Condition]]: if self._conditions is None: - conditions: Dict[Coin, List[Condition]] = {} + conditions: dict[Coin, list[Condition]] = {} max_cost = int(DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM) for cs in self._bundle.coin_spends: try: @@ -181,7 +181,7 @@ def conditions(self) -> Dict[Coin, List[Condition]]: assert self._conditions is not None, "self._conditions is None" return self._conditions - def valid_times(self) -> Dict[Coin, ConditionValidTimes]: + def valid_times(self) -> dict[Coin, ConditionValidTimes]: return {coin: parse_timelock_info(conditions) for coin, conditions in self.conditions().items()} def absolute_valid_times_ban_relatives(self) -> ConditionValidTimes: @@ -197,13 +197,13 @@ def absolute_valid_times_ban_relatives(self) -> ConditionValidTimes: raise ValueError("Offers with relative timelocks are not currently supported") return valid_times - def hints(self) -> Dict[bytes32, bytes32]: + def hints(self) -> dict[bytes32, bytes32]: return self._hints - def additions(self) -> List[Coin]: + def additions(self) -> list[Coin]: return [c for additions in self._additions.values() for c in additions] - def removals(self) -> List[Coin]: + def removals(self) -> list[Coin]: return self._bundle.removals() def fees(self) -> int: @@ -212,7 +212,7 @@ def fees(self) -> int: amount_out = sum(_.amount for _ in self.additions()) return int(amount_in - amount_out) - def coin_spends(self) -> List[CoinSpend]: + def coin_spends(self) -> list[CoinSpend]: return self._bundle.coin_spends def aggregated_signature(self) -> G2Element: @@ -220,15 +220,15 @@ def aggregated_signature(self) -> G2Element: # This method does not get every coin that is being offered, only the `settlement_payment` children # It's also a little heuristic, but it should get most things - def _get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]: - offered_coins: Dict[Optional[bytes32], List[Coin]] = {} + def _get_offered_coins(self) -> dict[Optional[bytes32], list[Coin]]: + offered_coins: dict[Optional[bytes32], list[Coin]] = {} for parent_spend in self._bundle.coin_spends: - coins_for_this_spend: List[Coin] = [] + coins_for_this_spend: list[Coin] = [] parent_puzzle: UncurriedPuzzle = uncurry_puzzle(parent_spend.puzzle_reveal) parent_solution: Program = parent_spend.solution.to_program() - additions: List[Coin] = self._additions[parent_spend.coin] + additions: list[Coin] = self._additions[parent_spend.coin] puzzle_driver = match_puzzle(parent_puzzle) if puzzle_driver is not None: @@ -240,7 +240,7 @@ def _get_offered_coins(self) -> 
Dict[Optional[bytes32], List[Coin]]: # We're going to look at the conditions created by the inner puzzle conditions: Program = inner_puzzle.run(inner_solution) expected_num_matches: int = 0 - offered_amounts: List[int] = [] + offered_amounts: list[int] = [] for condition in conditions.as_iter(): if condition.first() == 51 and condition.rest().first() == OFFER_MOD_HASH: expected_num_matches += 1 @@ -281,7 +281,7 @@ def _get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]: offered_coins[asset_id].extend(coins_for_this_spend) return offered_coins - def get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]: + def get_offered_coins(self) -> dict[Optional[bytes32], list[Coin]]: try: if self._offered_coins is not None: return self._offered_coins @@ -289,44 +289,44 @@ def get_offered_coins(self) -> Dict[Optional[bytes32], List[Coin]]: object.__setattr__(self, "_offered_coins", self._get_offered_coins()) return self._offered_coins - def get_offered_amounts(self) -> Dict[Optional[bytes32], int]: - offered_coins: Dict[Optional[bytes32], List[Coin]] = self.get_offered_coins() - offered_amounts: Dict[Optional[bytes32], int] = {} + def get_offered_amounts(self) -> dict[Optional[bytes32], int]: + offered_coins: dict[Optional[bytes32], list[Coin]] = self.get_offered_coins() + offered_amounts: dict[Optional[bytes32], int] = {} for asset_id, coins in offered_coins.items(): offered_amounts[asset_id] = uint64(sum(c.amount for c in coins)) return offered_amounts - def get_requested_payments(self) -> Dict[Optional[bytes32], List[NotarizedPayment]]: + def get_requested_payments(self) -> dict[Optional[bytes32], list[NotarizedPayment]]: return self.requested_payments - def get_requested_amounts(self) -> Dict[Optional[bytes32], int]: - requested_amounts: Dict[Optional[bytes32], int] = {} + def get_requested_amounts(self) -> dict[Optional[bytes32], int]: + requested_amounts: dict[Optional[bytes32], int] = {} for asset_id, coins in self.get_requested_payments().items(): requested_amounts[asset_id] = uint64(sum(c.amount for c in coins)) return requested_amounts - def arbitrage(self) -> Dict[Optional[bytes32], int]: + def arbitrage(self) -> dict[Optional[bytes32], int]: """ Returns a dictionary of the type of each asset and amount that is involved in the trade With the amount being how much their offered amount within the offer exceeds/falls short of their requested amount. 
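        For illustration (amounts assumed): if the maker offers 100 mojos of a
        CAT with asset id 0xcafe... and requests 1000 mojos of XCH, the result
        is {0xcafe...: 100, None: -1000}; a positive entry is surplus the maker
        gives up, and a negative entry is what the maker still expects to be
        paid.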
""" - offered_amounts: Dict[Optional[bytes32], int] = self.get_offered_amounts() - requested_amounts: Dict[Optional[bytes32], int] = self.get_requested_amounts() + offered_amounts: dict[Optional[bytes32], int] = self.get_offered_amounts() + requested_amounts: dict[Optional[bytes32], int] = self.get_requested_amounts() - arbitrage_dict: Dict[Optional[bytes32], int] = {} + arbitrage_dict: dict[Optional[bytes32], int] = {} for asset_id in [*requested_amounts.keys(), *offered_amounts.keys()]: arbitrage_dict[asset_id] = offered_amounts.get(asset_id, 0) - requested_amounts.get(asset_id, 0) return arbitrage_dict # This is a method mostly for the UI that creates a JSON summary of the offer - def summary(self) -> Tuple[Dict[str, int], Dict[str, int], Dict[str, Dict[str, Any]], ConditionValidTimes]: - offered_amounts: Dict[Optional[bytes32], int] = self.get_offered_amounts() - requested_amounts: Dict[Optional[bytes32], int] = self.get_requested_amounts() + def summary(self) -> tuple[dict[str, int], dict[str, int], dict[str, dict[str, Any]], ConditionValidTimes]: + offered_amounts: dict[Optional[bytes32], int] = self.get_offered_amounts() + requested_amounts: dict[Optional[bytes32], int] = self.get_requested_amounts() - def keys_to_strings(dic: Dict[Optional[bytes32], Any]) -> Dict[str, Any]: - new_dic: Dict[str, Any] = {} + def keys_to_strings(dic: dict[Optional[bytes32], Any]) -> dict[str, Any]: + new_dic: dict[str, Any] = {} for key in dic: if key is None: new_dic["xch"] = dic[key] @@ -334,7 +334,7 @@ def keys_to_strings(dic: Dict[Optional[bytes32], Any]) -> Dict[str, Any]: new_dic[key.hex()] = dic[key] return new_dic - driver_dict: Dict[str, Any] = {} + driver_dict: dict[str, Any] = {} for key, value in self.driver_dict.items(): driver_dict[key.hex()] = value.info @@ -347,12 +347,12 @@ def keys_to_strings(dic: Dict[Optional[bytes32], Any]) -> Dict[str, Any]: # Also mostly for the UI, returns a dictionary of assets and how much of them is pended for this offer # This method is also imperfect for sufficiently complex spends - def get_pending_amounts(self) -> Dict[str, int]: - all_additions: List[Coin] = self.additions() - all_removals: List[Coin] = self.removals() - non_ephemeral_removals: List[Coin] = list(filter(lambda c: c not in all_additions, all_removals)) + def get_pending_amounts(self) -> dict[str, int]: + all_additions: list[Coin] = self.additions() + all_removals: list[Coin] = self.removals() + non_ephemeral_removals: list[Coin] = list(filter(lambda c: c not in all_additions, all_removals)) - pending_dict: Dict[str, int] = {} + pending_dict: dict[str, int] = {} # First we add up the amounts of all coins that share an ancestor with the offered coins (i.e. 
a primary coin) for asset_id, coins in self.get_offered_coins().items(): name = "xch" if asset_id is None else asset_id.hex() @@ -372,16 +372,16 @@ def get_pending_amounts(self) -> Dict[str, int]: return pending_dict # This method returns all of the coins that are being used in the offer (without which it would be invalid) - def get_involved_coins(self) -> List[Coin]: + def get_involved_coins(self) -> list[Coin]: additions = self.additions() return list(filter(lambda c: c not in additions, self.removals())) # This returns the non-ephemeral removal that is an ancestor of the specified coin # This should maybe move to the SpendBundle object at some point def get_root_removal(self, coin: Coin) -> Coin: - all_removals: Set[Coin] = set(self.removals()) - all_removal_ids: Set[bytes32] = {c.name() for c in all_removals} - non_ephemeral_removals: Set[Coin] = { + all_removals: set[Coin] = set(self.removals()) + all_removal_ids: set[bytes32] = {c.name() for c in all_removals} + non_ephemeral_removals: set[Coin] = { c for c in all_removals if c.parent_coin_info not in {r.name() for r in all_removals} } if coin.name() not in all_removal_ids and coin.parent_coin_info not in all_removal_ids: @@ -393,19 +393,19 @@ def get_root_removal(self, coin: Coin) -> Coin: return coin # This will only return coins that are ancestors of settlement payments - def get_primary_coins(self) -> List[Coin]: - primary_coins: Set[Coin] = set() + def get_primary_coins(self) -> list[Coin]: + primary_coins: set[Coin] = set() for _, coins in self.get_offered_coins().items(): for coin in coins: primary_coins.add(self.get_root_removal(coin)) return list(primary_coins) # This returns the minimum coins that when spent will invalidate the rest of the bundle - def get_cancellation_coins(self) -> List[Coin]: + def get_cancellation_coins(self) -> list[Coin]: # First, we're going to gather: - dependencies: Dict[bytes32, List[bytes32]] = {} # all of the hashes that each coin depends on - announcements: Dict[bytes32, List[bytes32]] = {} # all of the hashes of the announcement that each coin makes - coin_names: List[bytes32] = [] # The names of all the coins + dependencies: dict[bytes32, list[bytes32]] = {} # all of the hashes that each coin depends on + announcements: dict[bytes32, list[bytes32]] = {} # all of the hashes of the announcement that each coin makes + coin_names: list[bytes32] = [] # The names of all the coins additions = self.additions() for spend in [cs for cs in self._bundle.coin_spends if cs.coin not in additions]: name = bytes32(spend.coin.name()) @@ -430,8 +430,8 @@ def get_cancellation_coins(self) -> List[Coin]: if removed is None: break removed_coin, provider = removed - removed_announcements: List[bytes32] = announcements[removed_coin] - remove_these_keys: List[bytes32] = [removed_coin] + removed_announcements: list[bytes32] = announcements[removed_coin] + remove_these_keys: list[bytes32] = [removed_coin] while True: for coin, deps in dependencies.items(): if set(deps) & set(removed_announcements) and coin != provider: @@ -449,14 +449,14 @@ def get_cancellation_coins(self) -> List[Coin]: return [cs.coin for cs in self._bundle.coin_spends if cs.coin.name() in coin_names] @classmethod - def aggregate(cls, offers: List[Offer]) -> Offer: - total_requested_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {} + def aggregate(cls, offers: list[Offer]) -> Offer: + total_requested_payments: dict[Optional[bytes32], list[NotarizedPayment]] = {} total_bundle = WalletSpendBundle([], G2Element()) - total_driver_dict: 
Dict[bytes32, PuzzleInfo] = {} + total_driver_dict: dict[bytes32, PuzzleInfo] = {} for offer in offers: # First check for any overlap in inputs - total_inputs: Set[Coin] = {cs.coin for cs in total_bundle.coin_spends} - offer_inputs: Set[Coin] = {cs.coin for cs in offer._bundle.coin_spends} + total_inputs: set[Coin] = {cs.coin for cs in total_bundle.coin_spends} + offer_inputs: set[Coin] = {cs.coin for cs in offer._bundle.coin_spends} if total_inputs & offer_inputs: raise ValueError("The aggregated offers overlap inputs") @@ -486,23 +486,23 @@ def to_valid_spend(self, arbitrage_ph: Optional[bytes32] = None, solver: Solver if not self.is_valid(): raise ValueError("Offer is currently incomplete") - completion_spends: List[CoinSpend] = [] - all_offered_coins: Dict[Optional[bytes32], List[Coin]] = self.get_offered_coins() - total_arbitrage_amount: Dict[Optional[bytes32], int] = self.arbitrage() + completion_spends: list[CoinSpend] = [] + all_offered_coins: dict[Optional[bytes32], list[Coin]] = self.get_offered_coins() + total_arbitrage_amount: dict[Optional[bytes32], int] = self.arbitrage() for asset_id, payments in self.requested_payments.items(): - offered_coins: List[Coin] = all_offered_coins[asset_id] + offered_coins: list[Coin] = all_offered_coins[asset_id] # Because of CAT supply laws, we must specify a place for the leftovers to go arbitrage_amount: int = total_arbitrage_amount[asset_id] - all_payments: List[NotarizedPayment] = payments.copy() + all_payments: list[NotarizedPayment] = payments.copy() if arbitrage_amount > 0: assert arbitrage_amount is not None assert arbitrage_ph is not None all_payments.append(NotarizedPayment(arbitrage_ph, uint64(arbitrage_amount))) # Some assets need to know about siblings so we need to collect all spends first to be able to use them - coin_to_spend_dict: Dict[Coin, CoinSpend] = {} - coin_to_solution_dict: Dict[Coin, Program] = {} + coin_to_spend_dict: dict[Coin, CoinSpend] = {} + coin_to_solution_dict: dict[Coin, Program] = {} for coin in offered_coins: parent_spend: CoinSpend = list( filter(lambda cs: cs.coin.name() == coin.parent_coin_info, self._bundle.coin_spends) @@ -511,9 +511,9 @@ def to_valid_spend(self, arbitrage_ph: Optional[bytes32] = None, solver: Solver inner_solutions = [] if coin == offered_coins[0]: - nonces: List[bytes32] = [p.nonce for p in all_payments] + nonces: list[bytes32] = [p.nonce for p in all_payments] for nonce in list(dict.fromkeys(nonces)): # dedup without messing with order - nonce_payments: List[NotarizedPayment] = list(filter(lambda p: p.nonce == nonce, all_payments)) + nonce_payments: list[NotarizedPayment] = list(filter(lambda p: p.nonce == nonce, all_payments)) inner_solutions.append((nonce, [np.as_condition_args() for np in nonce_payments])) coin_to_solution_dict[coin] = Program.to(inner_solutions) @@ -580,13 +580,13 @@ def to_spend_bundle(self) -> WalletSpendBundle: except AttributeError: pass # Before we serialize this as a SpendBundle, we need to serialize the `requested_payments` as dummy CoinSpends - additional_coin_spends: List[CoinSpend] = [] + additional_coin_spends: list[CoinSpend] = [] for asset_id, payments in self.requested_payments.items(): puzzle_reveal: Program = construct_puzzle(self.driver_dict[asset_id], OFFER_MOD) if asset_id else OFFER_MOD inner_solutions = [] - nonces: List[bytes32] = [p.nonce for p in payments] + nonces: list[bytes32] = [p.nonce for p in payments] for nonce in list(dict.fromkeys(nonces)): # dedup without messing with order - nonce_payments: List[NotarizedPayment] = 
list(filter(lambda p: p.nonce == nonce, payments)) + nonce_payments: list[NotarizedPayment] = list(filter(lambda p: p.nonce == nonce, payments)) inner_solutions.append((nonce, [np.as_condition_args() for np in nonce_payments])) additional_coin_spends.append( @@ -613,9 +613,9 @@ def to_spend_bundle(self) -> WalletSpendBundle: @classmethod def from_spend_bundle(cls, bundle: WalletSpendBundle) -> Offer: # Because of the `to_spend_bundle` method, we need to parse the dummy CoinSpends as `requested_payments` - requested_payments: Dict[Optional[bytes32], List[NotarizedPayment]] = {} - driver_dict: Dict[bytes32, PuzzleInfo] = {} - leftover_coin_spends: List[CoinSpend] = [] + requested_payments: dict[Optional[bytes32], list[NotarizedPayment]] = {} + driver_dict: dict[bytes32, PuzzleInfo] = {} + leftover_coin_spends: list[CoinSpend] = [] for coin_spend in bundle.coin_spends: driver = match_puzzle(uncurry_puzzle(coin_spend.puzzle_reveal)) if driver is not None: @@ -625,7 +625,7 @@ def from_spend_bundle(cls, bundle: WalletSpendBundle) -> Offer: else: asset_id = None if coin_spend.coin.parent_coin_info == ZERO_32: - notarized_payments: List[NotarizedPayment] = [] + notarized_payments: list[NotarizedPayment] = [] for payment_group in coin_spend.solution.to_program().as_iter(): nonce = bytes32(payment_group.first().as_atom()) payment_args_list = payment_group.rest().as_iter() @@ -652,7 +652,7 @@ def __eq__(self, other: object) -> bool: def compress(self, version: Optional[int] = None) -> bytes: as_spend_bundle = self.to_spend_bundle() if version is None: - mods: List[bytes] = [bytes(s.puzzle_reveal.to_program().uncurry()[0]) for s in as_spend_bundle.coin_spends] + mods: list[bytes] = [bytes(s.puzzle_reveal.to_program().uncurry()[0]) for s in as_spend_bundle.coin_spends] version = max(lowest_best_version(mods), 6) # Clients lower than version 6 should not be able to parse return compress_object_with_puzzles(bytes(as_spend_bundle), version) diff --git a/chia/wallet/trading/trade_store.py b/chia/wallet/trading/trade_store.py index a1d925261e63..9917d8db8859 100644 --- a/chia/wallet/trading/trade_store.py +++ b/chia/wallet/trading/trade_store.py @@ -2,7 +2,7 @@ import logging from time import perf_counter -from typing import Dict, List, Optional, Set, Tuple +from typing import Optional import aiosqlite @@ -23,7 +23,7 @@ async def migrate_coin_of_interest(log: logging.Logger, db: aiosqlite.Connection start_time = perf_counter() rows = await db.execute_fetchall("SELECT trade_record, trade_id from trade_records") - inserts: List[Tuple[bytes32, bytes32]] = [] + inserts: list[tuple[bytes32, bytes32]] = [] for row in rows: record: TradeRecordOld = TradeRecordOld.from_bytes(row[0]) for coin in record.coins_of_interest: @@ -57,7 +57,7 @@ async def migrate_is_my_offer(log: logging.Logger, db_connection: aiosqlite.Conn rows = await cursor.fetchall() await cursor.close() - updates: List[Tuple[int, str]] = [] + updates: list[tuple[int, str]] = [] for row in rows: record = TradeRecordOld.from_bytes(row[0]) is_my_offer = 1 if record.is_my_offer else 0 @@ -142,7 +142,7 @@ async def create( try: await conn.execute("CREATE TABLE trade_record_times(trade_id blob PRIMARY KEY, valid_times blob)") async with await conn.execute("SELECT trade_id from trade_records") as cursor: - trade_ids: List[bytes32] = [bytes32.from_hexstr(row[0]) for row in await cursor.fetchall()] + trade_ids: list[bytes32] = [bytes32.from_hexstr(row[0]) for row in await cursor.fetchall()] await conn.executemany( "INSERT INTO trade_record_times (trade_id, 
valid_times) VALUES(?, ?)", [(id, bytes(ConditionValidTimes())) for id in trade_ids], @@ -214,7 +214,7 @@ async def add_trade_record(self, record: TradeRecord, offer_name: bytes32, repla # remove all current coin ids await conn.execute("DELETE FROM coin_of_interest_to_trade_record WHERE trade_id=?", (record.trade_id,)) # now recreate them all - inserts: List[Tuple[bytes32, bytes32]] = [] + inserts: list[tuple[bytes32, bytes32]] = [] for coin in record.coins_of_interest: inserts.append((coin.name(), record.trade_id)) await conn.executemany( @@ -294,7 +294,7 @@ async def increment_sent( await self.add_trade_record(tx, offer.name()) return True - async def get_trades_count(self) -> Tuple[int, int, int]: + async def get_trades_count(self) -> tuple[int, int, int]: """ Returns the number of trades in the database broken down by is_my_offer status """ @@ -334,7 +334,7 @@ async def get_trade_record(self, trade_id: bytes32) -> Optional[TradeRecord]: return (await self._get_new_trade_records_from_old([TradeRecordOld.from_bytes(row[0])]))[0] return None - async def get_trade_record_with_status(self, status: TradeStatus) -> List[TradeRecord]: + async def get_trade_record_with_status(self, status: TradeStatus) -> list[TradeRecord]: """ Checks DB for TradeRecord with id: id and returns it. """ @@ -345,7 +345,7 @@ async def get_trade_record_with_status(self, status: TradeStatus) -> List[TradeR return await self._get_new_trade_records_from_old([TradeRecordOld.from_bytes(row[0]) for row in rows]) - async def get_coin_ids_of_interest_with_trade_statuses(self, trade_statuses: List[TradeStatus]) -> Set[bytes32]: + async def get_coin_ids_of_interest_with_trade_statuses(self, trade_statuses: list[TradeStatus]) -> set[bytes32]: """ Checks DB for TradeRecord with id: id and returns it. """ @@ -360,7 +360,7 @@ async def get_coin_ids_of_interest_with_trade_statuses(self, trade_statuses: Lis ) return {bytes32(row[0]) for row in rows} - async def get_all_trades(self) -> List[TradeRecord]: + async def get_all_trades(self) -> list[TradeRecord]: """ Returns all stored trades. """ @@ -382,7 +382,7 @@ async def get_trades_between( exclude_my_offers: bool = False, exclude_taken_offers: bool = False, include_completed: bool = False, - ) -> List[TradeRecord]: + ) -> list[TradeRecord]: """ Return a list of trades sorted by a key and between a start and end index. 
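        A hypothetical call shape (argument names beyond those visible in this
        hunk are assumed):

            trades = await trade_store.get_trades_between(
                start=0,
                end=50,
                exclude_taken_offers=True,
            )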
""" @@ -486,11 +486,11 @@ async def delete_trade_record(self, trade_id: bytes32) -> None: await (await conn.execute("DELETE FROM trade_records WHERE trade_id=?", (trade_id.hex(),))).close() await (await conn.execute("DELETE FROM trade_record_times WHERE trade_id=?", (trade_id,))).close() - async def _get_new_trade_records_from_old(self, old_records: List[TradeRecordOld]) -> List[TradeRecord]: - trade_id_to_valid_times: Dict[bytes, ConditionValidTimes] = {} + async def _get_new_trade_records_from_old(self, old_records: list[TradeRecordOld]) -> list[TradeRecord]: + trade_id_to_valid_times: dict[bytes, ConditionValidTimes] = {} empty_valid_times = ConditionValidTimes() async with self.db_wrapper.reader_no_transaction() as conn: - chunked_records: List[List[TradeRecordOld]] = [ + chunked_records: list[list[TradeRecordOld]] = [ old_records[i : min(len(old_records), i + self.db_wrapper.host_parameter_limit)] for i in range(0, len(old_records), self.db_wrapper.host_parameter_limit) ] diff --git a/chia/wallet/transaction_record.py b/chia/wallet/transaction_record.py index 8c8815bf6390..8fb0d71cc0a3 100644 --- a/chia/wallet/transaction_record.py +++ b/chia/wallet/transaction_record.py @@ -1,7 +1,8 @@ from __future__ import annotations +import builtins from dataclasses import dataclass -from typing import Any, Dict, Generic, List, Optional, Tuple, Type, TypeVar +from typing import Any, Generic, Optional, TypeVar from chia.consensus.coinbase import farmer_parent_id, pool_parent_id from chia.types.blockchain_format.coin import Coin @@ -24,7 +25,7 @@ @dataclass class ItemAndTransactionRecords(Generic[T]): item: T - transaction_records: List[TransactionRecord] + transaction_records: list[TransactionRecord] @streamable @@ -42,19 +43,19 @@ class TransactionRecordOld(Streamable): confirmed: bool sent: uint32 spend_bundle: Optional[WalletSpendBundle] - additions: List[Coin] - removals: List[Coin] + additions: list[Coin] + removals: list[Coin] wallet_id: uint32 # Represents the list of peers that we sent the transaction to, whether each one # included it in the mempool, and what the error message (if any) was - sent_to: List[Tuple[str, uint8, Optional[str]]] + sent_to: list[tuple[str, uint8, Optional[str]]] trade_id: Optional[bytes32] type: uint32 # TransactionType # name is also called bundle_id and tx_id name: bytes32 - memos: List[Tuple[bytes32, List[bytes]]] + memos: list[tuple[bytes32, list[bytes]]] def is_in_mempool(self) -> bool: # If one of the nodes we sent it to responded with success or pending, we return True @@ -78,19 +79,21 @@ def height_farmed(self, genesis_challenge: bytes32) -> Optional[uint32]: return uint32(block_index) return None - def get_memos(self) -> Dict[bytes32, List[bytes]]: + def get_memos(self) -> dict[bytes32, list[bytes]]: return {coin_id: ms for coin_id, ms in self.memos} @classmethod - def from_json_dict_convenience(cls: Type[_T_TransactionRecord], modified_tx_input: Dict) -> _T_TransactionRecord: + def from_json_dict_convenience( + cls: builtins.type[_T_TransactionRecord], modified_tx_input: dict + ) -> _T_TransactionRecord: modified_tx = modified_tx_input.copy() if "to_address" in modified_tx: modified_tx["to_puzzle_hash"] = decode_puzzle_hash(modified_tx["to_address"]).hex() if "to_address" in modified_tx: del modified_tx["to_address"] # Converts memos from a flat dict into a nested list - memos_dict: Dict[str, List[str]] = {} - memos_list: List = [] + memos_dict: dict[str, list[str]] = {} + memos_list: list = [] if "memos" in modified_tx: for coin_id, memo in 
modified_tx["memos"].items(): if coin_id not in memos_dict: @@ -102,13 +105,13 @@ def from_json_dict_convenience(cls: Type[_T_TransactionRecord], modified_tx_inpu return cls.from_json_dict(modified_tx) @classmethod - def from_json_dict(cls: Type[_T_TransactionRecord], json_dict: Dict[str, Any]) -> _T_TransactionRecord: + def from_json_dict(cls: builtins.type[_T_TransactionRecord], json_dict: dict[str, Any]) -> _T_TransactionRecord: try: return super().from_json_dict(json_dict) except Exception: return cls.from_json_dict_convenience(json_dict) - def to_json_dict_convenience(self, config: Dict) -> Dict: + def to_json_dict_convenience(self, config: dict) -> dict: selected = config["selected_network"] prefix = config["network_overrides"]["config"][selected]["address_prefix"] formatted = self.to_json_dict() @@ -133,7 +136,7 @@ def is_valid(self) -> bool: return True return False - def hint_dict(self) -> Dict[bytes32, bytes32]: + def hint_dict(self) -> dict[bytes32, bytes32]: return {coin_id: bytes32(memos[0]) for coin_id, memos in self.memos if len(memos) > 0 and len(memos[0]) == 32} diff --git a/chia/wallet/util/address_type.py b/chia/wallet/util/address_type.py index ad522c9eec77..aa842599e2f3 100644 --- a/chia/wallet/util/address_type.py +++ b/chia/wallet/util/address_type.py @@ -1,7 +1,7 @@ from __future__ import annotations from enum import Enum -from typing import Any, Dict, Set +from typing import Any from chia.util.bech32m import bech32_decode, convertbits from chia.util.config import selected_network_address_prefix @@ -12,7 +12,7 @@ class AddressType(Enum): NFT = "nft" DID = "did:chia:" - def hrp(self, config: Dict[str, Any]) -> str: + def hrp(self, config: dict[str, Any]) -> str: if self == AddressType.XCH: # Special case to map XCH to the current network's address prefix return selected_network_address_prefix(config) @@ -24,7 +24,7 @@ def expected_decoded_length(self) -> int: return 32 -def is_valid_address(address: str, allowed_types: Set[AddressType], config: Dict[str, Any]) -> bool: +def is_valid_address(address: str, allowed_types: set[AddressType], config: dict[str, Any]) -> bool: try: ensure_valid_address(address, allowed_types=allowed_types, config=config) return True @@ -32,7 +32,7 @@ def is_valid_address(address: str, allowed_types: Set[AddressType], config: Dict return False -def ensure_valid_address(address: str, *, allowed_types: Set[AddressType], config: Dict[str, Any]) -> str: +def ensure_valid_address(address: str, *, allowed_types: set[AddressType], config: dict[str, Any]) -> str: hrp, b32data = bech32_decode(address) if not b32data or not hrp: raise ValueError(f"Invalid address: {address}") diff --git a/chia/wallet/util/blind_signer_tl.py b/chia/wallet/util/blind_signer_tl.py index d5e8abb8cf50..2d017b5bddef 100644 --- a/chia/wallet/util/blind_signer_tl.py +++ b/chia/wallet/util/blind_signer_tl.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import List from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint64 @@ -41,7 +40,7 @@ def to_wallet_api(_from: BSTLSigningTarget) -> SigningTarget: @clvm_streamable @dataclass(frozen=True) class BSTLSumHint(Streamable): - fingerprints: List[bytes] = field(metadata=dict(key="f")) + fingerprints: list[bytes] = field(metadata=dict(key="f")) synthetic_offset: bytes = field(metadata=dict(key="o")) final_pubkey: bytes = field(metadata=dict(key="p")) @@ -58,7 +57,7 @@ def to_wallet_api(_from: BSTLSumHint) -> SumHint: 
@dataclass(frozen=True) class BSTLPathHint(Streamable): root_fingerprint: bytes = field(metadata=dict(key="f")) - path: List[uint64] = field(metadata=dict(key="p")) + path: list[uint64] = field(metadata=dict(key="p")) @staticmethod def from_wallet_api(_from: PathHint) -> BSTLPathHint: @@ -72,9 +71,9 @@ def to_wallet_api(_from: BSTLPathHint) -> PathHint: @clvm_streamable @dataclass(frozen=True) class BSTLSigningInstructions(Streamable): - sum_hints: List[BSTLSumHint] = field(metadata=dict(key="s")) - path_hints: List[BSTLPathHint] = field(metadata=dict(key="p")) - targets: List[BSTLSigningTarget] = field(metadata=dict(key="t")) + sum_hints: list[BSTLSumHint] = field(metadata=dict(key="s")) + path_hints: list[BSTLPathHint] = field(metadata=dict(key="p")) + targets: list[BSTLSigningTarget] = field(metadata=dict(key="t")) @staticmethod def from_wallet_api(_from: SigningInstructions) -> BSTLSigningInstructions: @@ -98,9 +97,9 @@ def to_wallet_api(_from: BSTLSigningInstructions) -> SigningInstructions: @clvm_streamable @dataclass(frozen=True) class BSTLUnsignedTransaction(Streamable): - sum_hints: List[BSTLSumHint] = field(metadata=dict(key="s")) - path_hints: List[BSTLPathHint] = field(metadata=dict(key="p")) - targets: List[BSTLSigningTarget] = field(metadata=dict(key="t")) + sum_hints: list[BSTLSumHint] = field(metadata=dict(key="s")) + path_hints: list[BSTLPathHint] = field(metadata=dict(key="p")) + targets: list[BSTLSigningTarget] = field(metadata=dict(key="t")) @staticmethod def from_wallet_api(_from: UnsignedTransaction) -> BSTLUnsignedTransaction: diff --git a/chia/wallet/util/clvm_streamable.py b/chia/wallet/util/clvm_streamable.py index 109541aee8f0..7849970783bb 100644 --- a/chia/wallet/util/clvm_streamable.py +++ b/chia/wallet/util/clvm_streamable.py @@ -3,7 +3,7 @@ import dataclasses import functools from types import MappingProxyType -from typing import Any, Callable, Dict, Generic, List, Optional, Type, TypeVar, Union, get_args, get_type_hints +from typing import Any, Callable, Generic, Optional, TypeVar, Union, get_args, get_type_hints from hsms.clvm_serde import from_program_for_type, to_program_for_type from typing_extensions import TypeGuard @@ -22,8 +22,8 @@ _T_Streamable = TypeVar("_T_Streamable", bound=Streamable) -def clvm_streamable(cls: Type[Streamable]) -> Type[Streamable]: - wrapped_cls: Type[Streamable] = streamable(cls) +def clvm_streamable(cls: type[Streamable]) -> type[Streamable]: + wrapped_cls: type[Streamable] = streamable(cls) setattr(wrapped_cls, "_clvm_streamable", True) hints = get_type_hints(cls) @@ -55,10 +55,10 @@ def byte_serialize_clvm_streamable( def json_serialize_with_clvm_streamable( streamable: object, - next_recursion_step: Optional[Callable[..., Dict[str, Any]]] = None, + next_recursion_step: Optional[Callable[..., dict[str, Any]]] = None, translation_layer: Optional[TranslationLayer] = None, **next_recursion_env: Any, -) -> Union[str, Dict[str, Any]]: +) -> Union[str, dict[str, Any]]: if next_recursion_step is None: next_recursion_step = recurse_jsonify if is_clvm_streamable(streamable): @@ -71,9 +71,9 @@ def json_serialize_with_clvm_streamable( def program_deserialize_clvm_streamable( - program: Program, clvm_streamable_type: Type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None + program: Program, clvm_streamable_type: type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None ) -> _T_Streamable: - type_to_deserialize_from: Type[Streamable] = clvm_streamable_type + type_to_deserialize_from: type[Streamable] = 
clvm_streamable_type if translation_layer is not None: mapping = translation_layer.get_mapping(clvm_streamable_type) if mapping is not None: @@ -87,7 +87,7 @@ def program_deserialize_clvm_streamable( def byte_deserialize_clvm_streamable( - blob: bytes, clvm_streamable_type: Type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None + blob: bytes, clvm_streamable_type: type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None ) -> _T_Streamable: return program_deserialize_clvm_streamable( Program.from_bytes(blob), clvm_streamable_type, translation_layer=translation_layer @@ -100,7 +100,7 @@ def is_compound_type(typ: Any) -> bool: # TODO: this is more than _just_ a Streamable, but it is also a Streamable and that's # useful for now -def is_clvm_streamable_type(v: Type[object]) -> TypeGuard[Type[Streamable]]: +def is_clvm_streamable_type(v: type[object]) -> TypeGuard[type[Streamable]]: return issubclass(v, Streamable) and hasattr(v, "_clvm_streamable") @@ -111,8 +111,8 @@ def is_clvm_streamable(v: object) -> TypeGuard[Streamable]: def json_deserialize_with_clvm_streamable( - json_dict: Union[str, Dict[str, Any]], - streamable_type: Type[_T_Streamable], + json_dict: Union[str, dict[str, Any]], + streamable_type: type[_T_Streamable], translation_layer: Optional[TranslationLayer] = None, ) -> _T_Streamable: if isinstance(json_dict, str): @@ -165,18 +165,18 @@ def json_deserialize_with_clvm_streamable( @dataclasses.dataclass(frozen=True) class TranslationLayerMapping(Generic[_T_ClvmStreamable, _T_TLClvmStreamable]): - from_type: Type[_T_ClvmStreamable] - to_type: Type[_T_TLClvmStreamable] + from_type: type[_T_ClvmStreamable] + to_type: type[_T_TLClvmStreamable] serialize_function: Callable[[_T_ClvmStreamable], _T_TLClvmStreamable] deserialize_function: Callable[[_T_TLClvmStreamable], _T_ClvmStreamable] @dataclasses.dataclass(frozen=True) class TranslationLayer: - type_mappings: List[TranslationLayerMapping[Any, Any]] + type_mappings: list[TranslationLayerMapping[Any, Any]] def get_mapping( - self, _type: Type[_T_ClvmStreamable] + self, _type: type[_T_ClvmStreamable] ) -> Optional[TranslationLayerMapping[_T_ClvmStreamable, Streamable]]: mappings = [m for m in self.type_mappings if m.from_type == _type] if len(mappings) == 1: diff --git a/chia/wallet/util/compute_hints.py b/chia/wallet/util/compute_hints.py index de982e8aac57..18981d5594ce 100644 --- a/chia/wallet/util/compute_hints.py +++ b/chia/wallet/util/compute_hints.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Dict, Optional, Tuple +from typing import Optional from chia.consensus.condition_costs import ConditionCost from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -24,10 +24,10 @@ def compute_spend_hints_and_additions( cs: CoinSpend, *, max_cost: int = DEFAULT_CONSTANTS.MAX_BLOCK_COST_CLVM, -) -> Tuple[Dict[bytes32, HintedCoin], int]: +) -> tuple[dict[bytes32, HintedCoin], int]: cost, result_program = cs.puzzle_reveal.run_with_cost(max_cost, cs.solution) - hinted_coins: Dict[bytes32, HintedCoin] = {} + hinted_coins: dict[bytes32, HintedCoin] = {} for condition in result_program.as_iter(): if cost > max_cost: raise ValidationError(Err.BLOCK_COST_EXCEEDS_MAX, "compute_spend_hints_and_additions() for CoinSpend") diff --git a/chia/wallet/util/compute_memos.py b/chia/wallet/util/compute_memos.py index a38a2ca51cfb..e445ab8d3ac0 100644 --- a/chia/wallet/util/compute_memos.py +++ b/chia/wallet/util/compute_memos.py @@ -1,7 +1,5 @@ from 
__future__ import annotations -from typing import Dict, List - from clvm.casts import int_from_bytes from chia.types.blockchain_format.coin import Coin @@ -13,9 +11,9 @@ from chia.wallet.wallet_spend_bundle import WalletSpendBundle -def compute_memos_for_spend(coin_spend: CoinSpend) -> Dict[bytes32, List[bytes]]: +def compute_memos_for_spend(coin_spend: CoinSpend) -> dict[bytes32, list[bytes]]: _, result = coin_spend.puzzle_reveal.run_with_cost(INFINITE_COST, coin_spend.solution) - memos: Dict[bytes32, List[bytes]] = {} + memos: dict[bytes32, list[bytes]] = {} for condition in result.as_python(): if condition[0] == ConditionOpcode.CREATE_COIN and len(condition) >= 4: # If only 3 elements (opcode + 2 args), there is no memo, this is ph, amount @@ -27,13 +25,13 @@ def compute_memos_for_spend(coin_spend: CoinSpend) -> Dict[bytes32, List[bytes]] return memos -def compute_memos(bundle: WalletSpendBundle) -> Dict[bytes32, List[bytes]]: +def compute_memos(bundle: WalletSpendBundle) -> dict[bytes32, list[bytes]]: """ Retrieves the memos for additions in this spend_bundle, which are formatted as a list in the 3rd parameter of CREATE_COIN. If there are no memos, the addition coin_id is not included. If they are not formatted as a list of bytes, they are not included. This is expensive to call, it should not be used in full node code. """ - memos: Dict[bytes32, List[bytes]] = {} + memos: dict[bytes32, list[bytes]] = {} for coin_spend in bundle.coin_spends: spend_memos = compute_memos_for_spend(coin_spend) for coin_name, coin_memos in spend_memos.items(): diff --git a/chia/wallet/util/curry_and_treehash.py b/chia/wallet/util/curry_and_treehash.py index dbdbd4ee9039..82045d09aa30 100644 --- a/chia/wallet/util/curry_and_treehash.py +++ b/chia/wallet/util/curry_and_treehash.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Sequence from hashlib import sha256 -from typing import Callable, List, Sequence +from typing import Callable from clvm.casts import int_to_bytes @@ -56,7 +57,7 @@ def shatree_int(val: int) -> bytes32: # `1` if R is 0 -def curried_values_tree_hash(arguments: List[bytes32]) -> bytes32: +def curried_values_tree_hash(arguments: list[bytes32]) -> bytes32: if len(arguments) == 0: return ONE_TREEHASH diff --git a/chia/wallet/util/debug_spend_bundle.py b/chia/wallet/util/debug_spend_bundle.py index 462ac68a7b86..a6a836e8811a 100644 --- a/chia/wallet/util/debug_spend_bundle.py +++ b/chia/wallet/util/debug_spend_bundle.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - from chia_rs import AugSchemeMPL from clvm.operators import KEYWORD_FROM_ATOM from clvm_tools.binutils import disassemble as bu_disassemble @@ -76,9 +74,9 @@ def debug_spend_bundle(spend_bundle, agg_sig_additional_data=DEFAULT_CONSTANTS.A pks = [] msgs = [] - created_coin_announcements: List[List[bytes]] = [] + created_coin_announcements: list[list[bytes]] = [] asserted_coin_announcements = [] - created_puzzle_announcements: List[List[bytes]] = [] + created_puzzle_announcements: list[list[bytes]] = [] asserted_puzzle_announcements = [] print("=" * 80) diff --git a/chia/wallet/util/merkle_tree.py b/chia/wallet/util/merkle_tree.py index d8905c7083d7..66458c01bbf8 100644 --- a/chia/wallet/util/merkle_tree.py +++ b/chia/wallet/util/merkle_tree.py @@ -2,7 +2,7 @@ import math from enum import Enum -from typing import List, Optional, Tuple +from typing import Optional from clvm.casts import int_to_bytes @@ -28,13 +28,13 @@ class TreeType(Enum): class MerkleTree: type: 
TreeType - nodes: List[bytes32] + nodes: list[bytes32] - def __init__(self, nodes: List[bytes32], waterfall: bool = False) -> None: + def __init__(self, nodes: list[bytes32], waterfall: bool = False) -> None: self.type = TreeType.WATERFALL if waterfall else TreeType.TREE self.nodes = nodes - def split_list(self, puzzle_hashes: List[bytes32]) -> Tuple[List[bytes32], List[bytes32]]: + def split_list(self, puzzle_hashes: list[bytes32]) -> tuple[list[bytes32], list[bytes32]]: if self.type == TreeType.TREE: mid_index = math.ceil(len(puzzle_hashes) / 2) first = puzzle_hashes[0:mid_index] @@ -45,7 +45,7 @@ def split_list(self, puzzle_hashes: List[bytes32]) -> Tuple[List[bytes32], List[ return first, rest - def _root(self, puzzle_hashes: List[bytes32]) -> bytes32: + def _root(self, puzzle_hashes: list[bytes32]) -> bytes32: if len(puzzle_hashes) == 1: return hash_an_atom(puzzle_hashes[0]) else: @@ -56,8 +56,8 @@ def calculate_root(self) -> bytes32: return self._root(self.nodes) def _proof( - self, puzzle_hashes: List[bytes32], searching_for: bytes32 - ) -> Tuple[Optional[int], Optional[List[bytes32]], bytes32, Optional[int]]: + self, puzzle_hashes: list[bytes32], searching_for: bytes32 + ) -> tuple[Optional[int], Optional[list[bytes32]], bytes32, Optional[int]]: if len(puzzle_hashes) == 1: atom_hash = hash_an_atom(puzzle_hashes[0]) if puzzle_hashes[0] == searching_for: @@ -95,6 +95,6 @@ def _proof( return (final_path, final_list, pair_hash, bit_num + 1 if bit_num is not None else None) - def generate_proof(self, leaf_reveal: bytes32) -> Tuple[Optional[int], List[Optional[List[bytes32]]]]: + def generate_proof(self, leaf_reveal: bytes32) -> tuple[Optional[int], list[Optional[list[bytes32]]]]: proof = self._proof(self.nodes, leaf_reveal) return (proof[0], [proof[1]]) diff --git a/chia/wallet/util/merkle_utils.py b/chia/wallet/util/merkle_utils.py index 96727c31cc21..960e8738dd5e 100644 --- a/chia/wallet/util/merkle_utils.py +++ b/chia/wallet/util/merkle_utils.py @@ -1,7 +1,7 @@ from __future__ import annotations import hashlib -from typing import Any, Dict, List, Tuple +from typing import Any from chia.types.blockchain_format.sized_bytes import bytes32 @@ -24,7 +24,7 @@ def sha256(*args: bytes) -> bytes32: return bytes32(hashlib.sha256(b"".join(args)).digest()) -def build_merkle_tree_from_binary_tree(tuples: TupleTree) -> Tuple[bytes32, Dict[bytes32, Tuple[int, List[bytes32]]]]: +def build_merkle_tree_from_binary_tree(tuples: TupleTree) -> tuple[bytes32, dict[bytes32, tuple[int, list[bytes32]]]]: if isinstance(tuples, bytes): tuples = bytes32(tuples) return sha256(HASH_LEAF_PREFIX, tuples), {tuples: (0, [])} @@ -45,7 +45,7 @@ def build_merkle_tree_from_binary_tree(tuples: TupleTree) -> Tuple[bytes32, Dict return new_root, new_proofs -def list_to_binary_tree(objects: List[Any]) -> Any: +def list_to_binary_tree(objects: list[Any]) -> Any: size = len(objects) if size == 0: raise ValueError("Cannot build a tree out of 0 objects") @@ -57,7 +57,7 @@ def list_to_binary_tree(objects: List[Any]) -> Any: return (list_to_binary_tree(first_half), list_to_binary_tree(last_half)) -def build_merkle_tree(objects: List[bytes32]) -> Tuple[bytes32, Dict[bytes32, Tuple[int, List[bytes32]]]]: +def build_merkle_tree(objects: list[bytes32]) -> tuple[bytes32, dict[bytes32, tuple[int, list[bytes32]]]]: """ return (merkle_root, dict_of_proofs) """ @@ -65,7 +65,7 @@ def build_merkle_tree(objects: List[bytes32]) -> Tuple[bytes32, Dict[bytes32, Tu return build_merkle_tree_from_binary_tree(objects_binary_tree) -def 
merkle_proof_from_path_and_tree(node_path: int, proof_tree: Proof_Tree_Type) -> Tuple[int, List[bytes32]]: +def merkle_proof_from_path_and_tree(node_path: int, proof_tree: Proof_Tree_Type) -> tuple[int, list[bytes32]]: proof_path = 0 proof = [] while not isinstance(proof_tree, bytes32): @@ -82,7 +82,7 @@ def merkle_proof_from_path_and_tree(node_path: int, proof_tree: Proof_Tree_Type) return proof_path, proof -def _simplify_merkle_proof(tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bytes32: +def _simplify_merkle_proof(tree_hash: bytes32, proof: tuple[int, list[bytes32]]) -> bytes32: # we return the expected merkle root path, nodes = proof for node in nodes: @@ -94,9 +94,9 @@ def _simplify_merkle_proof(tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) return tree_hash -def simplify_merkle_proof(tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bytes32: +def simplify_merkle_proof(tree_hash: bytes32, proof: tuple[int, list[bytes32]]) -> bytes32: return _simplify_merkle_proof(sha256(HASH_LEAF_PREFIX, tree_hash), proof) -def check_merkle_proof(merkle_root: bytes32, tree_hash: bytes32, proof: Tuple[int, List[bytes32]]) -> bool: +def check_merkle_proof(merkle_root: bytes32, tree_hash: bytes32, proof: tuple[int, list[bytes32]]) -> bool: return merkle_root == simplify_merkle_proof(tree_hash, proof) diff --git a/chia/wallet/util/new_peak_queue.py b/chia/wallet/util/new_peak_queue.py index a6109dfd3387..7d3df970e41c 100644 --- a/chia/wallet/util/new_peak_queue.py +++ b/chia/wallet/util/new_peak_queue.py @@ -3,7 +3,7 @@ import asyncio import dataclasses from enum import IntEnum -from typing import Any, List +from typing import Any from chia.protocols.wallet_protocol import CoinStateUpdate, NewPeakWallet from chia.server.ws_connection import WSChiaConnection @@ -57,11 +57,11 @@ def __init__(self, inner_queue: asyncio.PriorityQueue): self._inner_queue: asyncio.PriorityQueue = inner_queue self._pending_data_process_items: int = 0 - async def subscribe_to_coin_ids(self, coin_ids: List[bytes32]): + async def subscribe_to_coin_ids(self, coin_ids: list[bytes32]): self._pending_data_process_items += 1 await self._inner_queue.put(NewPeakItem(NewPeakQueueTypes.COIN_ID_SUBSCRIPTION, coin_ids)) - async def subscribe_to_puzzle_hashes(self, puzzle_hashes: List[bytes32]): + async def subscribe_to_puzzle_hashes(self, puzzle_hashes: list[bytes32]): self._pending_data_process_items += 1 await self._inner_queue.put(NewPeakItem(NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION, puzzle_hashes)) diff --git a/chia/wallet/util/peer_request_cache.py b/chia/wallet/util/peer_request_cache.py index c71e87fa0ee0..3c68a7710e61 100644 --- a/chia/wallet/util/peer_request_cache.py +++ b/chia/wallet/util/peer_request_cache.py @@ -1,7 +1,7 @@ from __future__ import annotations import asyncio -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any, Optional from chia.protocols.wallet_protocol import CoinState from chia.types.blockchain_format.sized_bytes import bytes32 @@ -13,15 +13,15 @@ class PeerRequestCache: _blocks: LRUCache[uint32, HeaderBlock] # height -> HeaderBlock - _block_requests: LRUCache[Tuple[uint32, uint32], asyncio.Task[Any]] # (start, end) -> Task + _block_requests: LRUCache[tuple[uint32, uint32], asyncio.Task[Any]] # (start, end) -> Task _states_validated: LRUCache[bytes32, Optional[uint32]] # coin state hash -> last change height, or None for reorg _timestamps: LRUCache[uint32, uint64] # block height -> timestamp _blocks_validated: LRUCache[bytes32, uint32] # header_hash -> 
height _block_signatures_validated: LRUCache[bytes32, uint32] # sig_hash -> height - _additions_in_block: LRUCache[Tuple[bytes32, bytes32], uint32] # header_hash, puzzle_hash -> height + _additions_in_block: LRUCache[tuple[bytes32, bytes32], uint32] # header_hash, puzzle_hash -> height # The wallet gets the state update before receiving the block. In untrusted mode the block is required for the # coin state validation, so we cache them before we apply them once we received the block. - _race_cache: Dict[uint32, Set[CoinState]] + _race_cache: dict[uint32, set[CoinState]] def __init__(self) -> None: self._blocks = LRUCache(100) @@ -91,7 +91,7 @@ def add_to_additions_in_block(self, header_hash: bytes32, addition_ph: bytes32, def in_additions_in_block(self, header_hash: bytes32, addition_ph: bytes32) -> bool: return self._additions_in_block.get((header_hash, addition_ph)) is not None - def add_states_to_race_cache(self, coin_states: List[CoinState]) -> None: + def add_states_to_race_cache(self, coin_states: list[CoinState]) -> None: for coin_state in coin_states: created_height = 0 if coin_state.created_height is None else coin_state.created_height spent_height = 0 if coin_state.spent_height is None else coin_state.spent_height @@ -99,7 +99,7 @@ def add_states_to_race_cache(self, coin_states: List[CoinState]) -> None: race_cache = self._race_cache.setdefault(max_height, set()) race_cache.add(coin_state) - def get_race_cache(self, height: int) -> Set[CoinState]: + def get_race_cache(self, height: int) -> set[CoinState]: return self._race_cache[uint32(height)] def rollback_race_cache(self, *, fork_height: int) -> None: @@ -120,7 +120,7 @@ def clear_after_height(self, height: int) -> None: new_blocks.put(k, v) self._blocks = new_blocks - new_block_requests: LRUCache[Tuple[uint32, uint32], asyncio.Task[Any]] = LRUCache(self._block_requests.capacity) + new_block_requests: LRUCache[tuple[uint32, uint32], asyncio.Task[Any]] = LRUCache(self._block_requests.capacity) for (start_h, end_h), fetch_task in self._block_requests.cache.items(): if start_h <= height and end_h <= height: new_block_requests.put((start_h, end_h), fetch_task) @@ -150,7 +150,7 @@ def clear_after_height(self, height: int) -> None: new_block_signatures_validated.put(sig_hash, h) self._block_signatures_validated = new_block_signatures_validated - new_additions_in_block: LRUCache[Tuple[bytes32, bytes32], uint32] = LRUCache(self._additions_in_block.capacity) + new_additions_in_block: LRUCache[tuple[bytes32, bytes32], uint32] = LRUCache(self._additions_in_block.capacity) for (hh, ph), h in self._additions_in_block.cache.items(): if h <= height: new_additions_in_block.put((hh, ph), h) diff --git a/chia/wallet/util/puzzle_compression.py b/chia/wallet/util/puzzle_compression.py index 8838cf77beba..87996d8d2b1d 100644 --- a/chia/wallet/util/puzzle_compression.py +++ b/chia/wallet/util/puzzle_compression.py @@ -1,7 +1,6 @@ from __future__ import annotations import zlib -from typing import List from chia.types.blockchain_format.program import Program from chia.wallet.cat_wallet.cat_utils import CAT_MOD @@ -85,7 +84,7 @@ def compress_object_with_puzzles(object_bytes: bytes, version: int) -> bytes: return version_blob + compressed_object_blob -def lowest_best_version(puzzle_list: List[bytes], max_version: int = len(ZDICT)) -> int: +def lowest_best_version(puzzle_list: list[bytes], max_version: int = len(ZDICT)) -> int: highest_version = 1 for mod in puzzle_list: for version, dict in enumerate(ZDICT): diff --git 
a/chia/wallet/util/puzzle_decorator.py b/chia/wallet/util/puzzle_decorator.py index dff15c72ed69..bff39e887049 100644 --- a/chia/wallet/util/puzzle_decorator.py +++ b/chia/wallet/util/puzzle_decorator.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, Dict, List, Tuple +from typing import Any from typing_extensions import Protocol @@ -14,27 +14,27 @@ class PuzzleDecoratorProtocol(Protocol): @staticmethod - def create(config: Dict[str, Any]) -> PuzzleDecoratorProtocol: ... + def create(config: dict[str, Any]) -> PuzzleDecoratorProtocol: ... def decorate(self, inner_puzzle: Program) -> Program: ... def decorate_target_puzzle_hash( self, inner_puzzle: Program, target_puzzle_hash: bytes32 - ) -> Tuple[Program, bytes32]: ... + ) -> tuple[Program, bytes32]: ... def decorate_memos( - self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: List[bytes] - ) -> Tuple[Program, List[bytes]]: ... + self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: list[bytes] + ) -> tuple[Program, list[bytes]]: ... - def solve(self, puzzle: Program, primaries: List[Payment], inner_solution: Program) -> Tuple[Program, Program]: ... + def solve(self, puzzle: Program, primaries: list[Payment], inner_solution: Program) -> tuple[Program, Program]: ... class PuzzleDecoratorManager: - decorator_list: List[PuzzleDecoratorProtocol] + decorator_list: list[PuzzleDecoratorProtocol] log: logging.Logger @staticmethod - def create(config: List[Dict[str, Any]]) -> PuzzleDecoratorManager: + def create(config: list[dict[str, Any]]) -> PuzzleDecoratorManager: """ Create a new puzzle decorator manager :param config: Config @@ -75,7 +75,7 @@ def decorate_target_puzzle_hash(self, inner_puzzle: Program, target_puzzle_hash: inner_puzzle, target_puzzle_hash = decorator.decorate_target_puzzle_hash(inner_puzzle, target_puzzle_hash) return target_puzzle_hash - def solve(self, inner_puzzle: Program, primaries: List[Payment], inner_solution: Program) -> Program: + def solve(self, inner_puzzle: Program, primaries: list[Payment], inner_solution: Program) -> Program: """ Generate the solution of the puzzle :param inner_puzzle: Inner puzzle @@ -87,7 +87,7 @@ def solve(self, inner_puzzle: Program, primaries: List[Payment], inner_solution: inner_puzzle, inner_solution = decorator.solve(inner_puzzle, primaries, inner_solution) return inner_solution - def decorate_memos(self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: List[bytes]) -> List[bytes]: + def decorate_memos(self, inner_puzzle: Program, target_puzzle_hash: bytes32, memos: list[bytes]) -> list[bytes]: """ Decorate a memo list :param inner_puzzle: Inner puzzle diff --git a/chia/wallet/util/query_filter.py b/chia/wallet/util/query_filter.py index 49b82f47e239..99739bf646cd 100644 --- a/chia/wallet/util/query_filter.py +++ b/chia/wallet/util/query_filter.py @@ -2,7 +2,6 @@ from dataclasses import dataclass from enum import IntEnum -from typing import List from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.ints import uint8, uint64 @@ -18,43 +17,43 @@ class FilterMode(IntEnum): @streamable @dataclass(frozen=True) class TransactionTypeFilter(Streamable): - values: List[uint8] + values: list[uint8] mode: uint8 # FilterMode @classmethod - def include(cls, values: List[TransactionType]) -> TransactionTypeFilter: + def include(cls, values: list[TransactionType]) -> TransactionTypeFilter: return cls([uint8(t.value) for t in values], uint8(FilterMode.include)) @classmethod - def exclude(cls, values: 
List[TransactionType]) -> TransactionTypeFilter: + def exclude(cls, values: list[TransactionType]) -> TransactionTypeFilter: return cls([uint8(t.value) for t in values], uint8(FilterMode.exclude)) @streamable @dataclass(frozen=True) class AmountFilter(Streamable): - values: List[uint64] + values: list[uint64] mode: uint8 # FilterMode @classmethod - def include(cls, values: List[uint64]) -> AmountFilter: + def include(cls, values: list[uint64]) -> AmountFilter: return cls(values, mode=uint8(FilterMode.include)) @classmethod - def exclude(cls, values: List[uint64]) -> AmountFilter: + def exclude(cls, values: list[uint64]) -> AmountFilter: return cls(values, mode=uint8(FilterMode.exclude)) @streamable @dataclass(frozen=True) class HashFilter(Streamable): - values: List[bytes32] + values: list[bytes32] mode: uint8 # FilterMode @classmethod - def include(cls, values: List[bytes32]) -> HashFilter: + def include(cls, values: list[bytes32]) -> HashFilter: return cls(values, mode=uint8(FilterMode.include)) @classmethod - def exclude(cls, values: List[bytes32]) -> HashFilter: + def exclude(cls, values: list[bytes32]) -> HashFilter: return cls(values, mode=uint8(FilterMode.exclude)) diff --git a/chia/wallet/util/tx_config.py b/chia/wallet/util/tx_config.py index 463ec63704f0..5e1f43d45a13 100644 --- a/chia/wallet/util/tx_config.py +++ b/chia/wallet/util/tx_config.py @@ -1,7 +1,7 @@ from __future__ import annotations import dataclasses -from typing import Any, Dict, List, Optional, Type, TypeVar +from typing import Any, Optional, TypeVar from typing_extensions import NotRequired, TypedDict, Unpack @@ -17,10 +17,10 @@ class CoinSelectionConfig: min_coin_amount: uint64 max_coin_amount: uint64 - excluded_coin_amounts: List[uint64] - excluded_coin_ids: List[bytes32] + excluded_coin_amounts: list[uint64] + excluded_coin_ids: list[bytes32] - def to_json_dict(self) -> Dict[str, Any]: + def to_json_dict(self) -> dict[str, Any]: return CoinSelectionConfigLoader( self.min_coin_amount, self.max_coin_amount, @@ -46,7 +46,7 @@ def coin_selection_config(self) -> CoinSelectionConfig: self.excluded_coin_ids, ) - def to_json_dict(self) -> Dict[str, Any]: + def to_json_dict(self) -> dict[str, Any]: return TXConfigLoader( self.min_coin_amount, self.max_coin_amount, @@ -62,7 +62,7 @@ def override(self, **kwargs: Any) -> TXConfig: class AutofillArgs(TypedDict): constants: ConsensusConstants - config: NotRequired[Dict[str, Any]] + config: NotRequired[dict[str, Any]] logged_in_fingerprint: NotRequired[int] @@ -74,8 +74,8 @@ class AutofillArgs(TypedDict): class CoinSelectionConfigLoader(Streamable): min_coin_amount: Optional[uint64] = None max_coin_amount: Optional[uint64] = None - excluded_coin_amounts: Optional[List[uint64]] = None - excluded_coin_ids: Optional[List[bytes32]] = None + excluded_coin_amounts: Optional[list[uint64]] = None + excluded_coin_ids: Optional[list[bytes32]] = None def autofill( self, @@ -91,11 +91,11 @@ def autofill( @classmethod def from_json_dict( - cls: Type[_T_CoinSelectionConfigLoader], json_dict: Dict[str, Any] + cls: type[_T_CoinSelectionConfigLoader], json_dict: dict[str, Any] ) -> _T_CoinSelectionConfigLoader: if "excluded_coins" in json_dict: - excluded_coins: List[Coin] = [Coin.from_json_dict(c) for c in json_dict["excluded_coins"]] - excluded_coin_ids: List[str] = [c.name().hex() for c in excluded_coins] + excluded_coins: list[Coin] = [Coin.from_json_dict(c) for c in json_dict["excluded_coins"]] + excluded_coin_ids: list[str] = [c.name().hex() for c in excluded_coins] if 
"excluded_coin_ids" in json_dict: json_dict["excluded_coin_ids"] = [*excluded_coin_ids, *json_dict["excluded_coin_ids"]] else: @@ -118,7 +118,7 @@ def autofill( ) -> TXConfig: constants: ConsensusConstants = kwargs["constants"] if self.reuse_puzhash is None: - config: Dict[str, Any] = kwargs.get("config", {}) + config: dict[str, Any] = kwargs.get("config", {}) logged_in_fingerprint: int = kwargs.get("logged_in_fingerprint", -1) reuse_puzhash_config = config.get("reuse_public_key_for_change", None) if reuse_puzhash_config is None: diff --git a/chia/wallet/util/wallet_sync_utils.py b/chia/wallet/util/wallet_sync_utils.py index 8bf98aacef85..82095a207bcc 100644 --- a/chia/wallet/util/wallet_sync_utils.py +++ b/chia/wallet/util/wallet_sync_utils.py @@ -3,7 +3,7 @@ import asyncio import logging import random -from typing import Any, List, Optional, Set, Tuple, Union +from typing import Any, Optional, Union from chia_rs import compute_merkle_set_root, confirm_included_already_hashed, confirm_not_included_already_hashed @@ -46,10 +46,10 @@ class PeerRequestException(Exception): async def subscribe_to_phs( - puzzle_hashes: List[bytes32], + puzzle_hashes: list[bytes32], peer: WSChiaConnection, min_height: int, -) -> List[CoinState]: +) -> list[CoinState]: """ Tells full nodes that we are interested in puzzle hashes, and returns the response. """ @@ -63,10 +63,10 @@ async def subscribe_to_phs( async def subscribe_to_coin_updates( - coin_names: List[bytes32], + coin_names: list[bytes32], peer: WSChiaConnection, min_height: int, -) -> List[CoinState]: +) -> list[CoinState]: """ Tells full nodes that we are interested in coin ids, and returns the response. """ @@ -81,13 +81,13 @@ async def subscribe_to_coin_updates( def validate_additions( - coins: List[Tuple[bytes32, List[Coin]]], - proofs: Optional[List[Tuple[bytes32, bytes, Optional[bytes]]]], + coins: list[tuple[bytes32, list[Coin]]], + proofs: Optional[list[tuple[bytes32, bytes, Optional[bytes]]]], root: bytes32, ) -> bool: if proofs is None: # Verify root - additions_merkle_items: List[bytes32] = [] + additions_merkle_items: list[bytes32] = [] # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash for puzzle_hash, coins_l in coins: @@ -100,7 +100,7 @@ def validate_additions( else: for i in range(len(coins)): assert coins[i][0] == proofs[i][0] - coin_list_1: List[Coin] = coins[i][1] + coin_list_1: list[Coin] = coins[i][1] puzzle_hash_proof: Optional[bytes] = proofs[i][1] coin_list_proof: Optional[bytes] = proofs[i][2] if len(coin_list_1) == 0: @@ -143,7 +143,7 @@ def validate_additions( def validate_removals( - coins: List[Tuple[bytes32, Optional[Coin]]], proofs: Optional[List[Tuple[bytes32, bytes]]], root: bytes32 + coins: list[tuple[bytes32, Optional[Coin]]], proofs: Optional[list[tuple[bytes32, bytes]]], root: bytes32 ) -> bool: if proofs is None: # If there are no proofs, it means all removals were returned in the response. 
@@ -235,7 +235,7 @@ def last_change_height_cs(cs: CoinState) -> uint32: return uint32(0) -def sort_coin_states(coin_states: Set[CoinState]) -> List[CoinState]: +def sort_coin_states(coin_states: set[CoinState]) -> list[CoinState]: return sorted( coin_states, key=lambda coin_state: ( @@ -248,7 +248,7 @@ def sort_coin_states(coin_states: Set[CoinState]) -> List[CoinState]: async def request_header_blocks( peer: WSChiaConnection, start_height: uint32, end_height: uint32 -) -> Optional[List[HeaderBlock]]: +) -> Optional[list[HeaderBlock]]: if Capability.BLOCK_HEADERS in peer.peer_capabilities: response = await peer.call_api( FullNodeAPI.request_block_headers, RequestBlockHeaders(start_height, end_height, False) @@ -262,7 +262,7 @@ async def request_header_blocks( async def _fetch_header_blocks_inner( - all_peers: List[Tuple[WSChiaConnection, bool]], + all_peers: list[tuple[WSChiaConnection, bool]], request_start: uint32, request_end: uint32, ) -> Optional[Union[RespondHeaderBlocks, RespondBlockHeaders]]: @@ -298,9 +298,9 @@ async def fetch_header_blocks_in_range( start: uint32, end: uint32, peer_request_cache: PeerRequestCache, - all_peers: List[Tuple[WSChiaConnection, bool]], -) -> Optional[List[HeaderBlock]]: - blocks: List[HeaderBlock] = [] + all_peers: list[tuple[WSChiaConnection, bool]], +) -> Optional[list[HeaderBlock]]: + blocks: list[HeaderBlock] = [] for i in range(start - (start % 32), end + 1, 32): request_start = min(uint32(i), end) request_end = min(uint32(i + 31), end) diff --git a/chia/wallet/vc_wallet/cr_cat_drivers.py b/chia/wallet/vc_wallet/cr_cat_drivers.py index ea82f53b2f53..55988447f715 100644 --- a/chia/wallet/vc_wallet/cr_cat_drivers.py +++ b/chia/wallet/vc_wallet/cr_cat_drivers.py @@ -1,9 +1,10 @@ from __future__ import annotations import functools +from collections.abc import Iterable from dataclasses import dataclass, replace from enum import IntEnum -from typing import Iterable, List, Optional, Tuple, Type, TypeVar +from typing import Optional, TypeVar from clvm.casts import int_to_bytes @@ -94,7 +95,7 @@ # Basic drivers def construct_cr_layer( - authorized_providers: List[bytes32], + authorized_providers: list[bytes32], proofs_checker: Program, inner_puzzle: Program, ) -> Program: @@ -128,7 +129,7 @@ def construct_cr_layer_hash( def match_cr_layer( uncurried_puzzle: UncurriedPuzzle, -) -> Optional[Tuple[List[bytes32], Program, Program]]: +) -> Optional[tuple[list[bytes32], Program, Program]]: extra_uncurried_puzzle = uncurry_puzzle(uncurried_puzzle.mod) if extra_uncurried_puzzle.mod == CREDENTIAL_RESTRICTION: return ( @@ -176,24 +177,24 @@ class CRCAT: coin: Coin tail_hash: bytes32 lineage_proof: LineageProof - authorized_providers: List[bytes32] + authorized_providers: list[bytes32] proofs_checker: Program inner_puzzle_hash: bytes32 @classmethod def launch( - cls: Type[_T_CRCAT], + cls: type[_T_CRCAT], # General CAT launching info origin_coin: Coin, payment: Payment, tail: Program, tail_solution: Program, # CR Layer params - authorized_providers: List[bytes32], + authorized_providers: list[bytes32], proofs_checker: Program, # Probably never need this but some tail might optional_lineage_proof: Optional[LineageProof] = None, - ) -> Tuple[Program, CoinSpend, CRCAT]: + ) -> tuple[Program, CoinSpend, CRCAT]: """ Launch a new CR-CAT from XCH. 
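A rough usage sketch for the classmethod above; every value is a placeholder and only parameters visible in this hunk are used:

    launch_program, eve_spend, crcat = CRCAT.launch(
        origin_coin=xch_coin,                  # plain XCH coin funding the CAT
        payment=Payment(inner_puzzle_hash, amount),
        tail=genesis_tail,
        tail_solution=Program.to([]),
        authorized_providers=[provider_id],    # bytes32 ids of trusted providers
        proofs_checker=proofs_checker_puzzle,  # e.g. ProofsChecker(...).as_program()
    )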
@@ -288,7 +289,7 @@ def construct_cr_layer(self, inner_puzzle: Program) -> Program: ) @staticmethod - def is_cr_cat(puzzle_reveal: UncurriedPuzzle) -> Tuple[bool, str]: + def is_cr_cat(puzzle_reveal: UncurriedPuzzle) -> tuple[bool, str]: """ This takes an (uncurried) puzzle reveal and returns a boolean for whether the puzzle is a CR-CAT and an error message for if the puzzle is a mismatch. @@ -310,7 +311,7 @@ def get_inner_solution(solution: Program) -> Program: # pragma: no cover return solution.at("f").at("rrrrrrf") @classmethod - def get_current_from_coin_spend(cls: Type[_T_CRCAT], spend: CoinSpend) -> CRCAT: # pragma: no cover + def get_current_from_coin_spend(cls: type[_T_CRCAT], spend: CoinSpend) -> CRCAT: # pragma: no cover uncurried_puzzle: UncurriedPuzzle = uncurry_puzzle(spend.puzzle_reveal) first_uncurried_cr_layer: UncurriedPuzzle = uncurry_puzzle(uncurried_puzzle.args.at("rrf")) second_uncurried_cr_layer: UncurriedPuzzle = uncurry_puzzle(first_uncurried_cr_layer.mod) @@ -329,10 +330,10 @@ def get_current_from_coin_spend(cls: Type[_T_CRCAT], spend: CoinSpend) -> CRCAT: @classmethod def get_next_from_coin_spend( - cls: Type[_T_CRCAT], + cls: type[_T_CRCAT], parent_spend: CoinSpend, conditions: Optional[Program] = None, # For optimization purposes, the conditions may already have been run - ) -> List[CRCAT]: + ) -> list[CRCAT]: """ Given a coin spend, this will return the next CR-CATs that were created as an output of that spend. Inner puzzle output conditions may also be supplied as an optimization. @@ -388,12 +389,12 @@ def get_next_from_coin_spend( uint64(parent_spend.coin.amount), ) - all_conditions: List[Program] = list(conditions.as_iter()) + all_conditions: list[Program] = list(conditions.as_iter()) if len(all_conditions) > 1000: raise RuntimeError("More than 1000 conditions not currently supported by CRCAT drivers") # pragma: no cover # Almost complete except the coin's full puzzle hash which we want to use the class method to calculate - partially_completed_crcats: List[CRCAT] = [ + partially_completed_crcats: list[CRCAT] = [ CRCAT( Coin(coin_name, bytes(32), uint64(condition.at("rrf").as_int())), bytes32(tail_hash_as_prog.as_atom()), @@ -438,7 +439,7 @@ def do_spend( inner_solution: Program, # For optimization purposes the conditions may already have been run conditions: Optional[Iterable[Program]] = None, - ) -> Tuple[List[AssertCoinAnnouncement], CoinSpend, List[CRCAT]]: + ) -> tuple[list[AssertCoinAnnouncement], CoinSpend, list[CRCAT]]: """ Spend a CR-CAT. @@ -449,8 +450,8 @@ def do_spend( Likely, spend_many is more useful. 
""" # Gather the output information - announcements: List[AssertCoinAnnouncement] = [] - new_inner_puzzle_hashes_and_amounts: List[Tuple[bytes32, uint64]] = [] + announcements: list[AssertCoinAnnouncement] = [] + new_inner_puzzle_hashes_and_amounts: list[tuple[bytes32, uint64]] = [] if conditions is None: conditions = inner_puzzle.run(inner_solution).as_iter() # pragma: no cover assert conditions is not None @@ -518,15 +519,15 @@ def do_spend( @classmethod def spend_many( - cls: Type[_T_CRCAT], - inner_spends: List[Tuple[_T_CRCAT, int, Program, Program]], # CRCAT, extra_delta, inner puzzle, inner solution + cls: type[_T_CRCAT], + inner_spends: list[tuple[_T_CRCAT, int, Program, Program]], # CRCAT, extra_delta, inner puzzle, inner solution # CR layer solving info proof_of_inclusions: Program, proof_checker_solution: Program, provider_id: bytes32, vc_launcher_id: bytes32, vc_inner_puzhash: Optional[bytes32], # Optional for incomplete spends - ) -> Tuple[List[AssertCoinAnnouncement], List[CoinSpend], List[CRCAT]]: + ) -> tuple[list[AssertCoinAnnouncement], list[CoinSpend], list[CRCAT]]: """ Spend a multiple CR-CATs. @@ -541,19 +542,19 @@ def next_index(index: int) -> int: def prev_index(index: int) -> int: return index - 1 - sorted_inner_spends: List[Tuple[_T_CRCAT, int, Program, Program]] = sorted( + sorted_inner_spends: list[tuple[_T_CRCAT, int, Program, Program]] = sorted( inner_spends, key=lambda spend: spend[0].coin.name(), ) - all_expected_announcements: List[AssertCoinAnnouncement] = [] - all_coin_spends: List[CoinSpend] = [] - all_new_crcats: List[CRCAT] = [] + all_expected_announcements: list[AssertCoinAnnouncement] = [] + all_coin_spends: list[CoinSpend] = [] + all_new_crcats: list[CRCAT] = [] subtotal: int = 0 for i, inner_spend in enumerate(sorted_inner_spends): crcat, extra_delta, inner_puzzle, inner_solution = inner_spend - conditions: List[Program] = list(inner_puzzle.run(inner_solution).as_iter()) + conditions: list[Program] = list(inner_puzzle.run(inner_solution).as_iter()) output_amount: int = ( sum( c.at("rrf").as_int() @@ -604,9 +605,9 @@ class CRCATSpend: crcat: CRCAT inner_puzzle: Program inner_solution: Program - children: List[CRCAT] + children: list[CRCAT] incomplete: bool - inner_conditions: List[Program] + inner_conditions: list[Program] proof_of_inclusions: Program @classmethod @@ -628,7 +629,7 @@ def from_coin_spend(cls, spend: CoinSpend) -> CRCATSpend: # pragma: no cover @streamable @dataclass(frozen=True) class ProofsChecker(Streamable): - flags: List[str] + flags: list[str] def as_program(self) -> Program: def byte_sort_flags(f1: str, f2: str) -> int: diff --git a/chia/wallet/vc_wallet/cr_cat_wallet.py b/chia/wallet/vc_wallet/cr_cat_wallet.py index b9f01e798012..2df1ce6efb4b 100644 --- a/chia/wallet/vc_wallet/cr_cat_wallet.py +++ b/chia/wallet/vc_wallet/cr_cat_wallet.py @@ -3,7 +3,7 @@ import logging import time import traceback -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple +from typing import TYPE_CHECKING, Any, Optional from chia_rs import G1Element, G2Element from typing_extensions import Unpack @@ -82,7 +82,7 @@ def cost_of_single_tx(self) -> int: async def create_new_cat_wallet( wallet_state_manager: WalletStateManager, wallet: Wallet, - cat_tail_info: Dict[str, Any], + cat_tail_info: dict[str, Any], amount: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), @@ -97,7 +97,7 @@ async def get_or_create_wallet_for_cat( wallet: Wallet, limitations_program_hash_hex: str, name: Optional[str] = None, - 
authorized_providers: Optional[List[bytes32]] = None, + authorized_providers: Optional[list[bytes32]] = None, proofs_checker: Optional[ProofsChecker] = None, ) -> CRCATWallet: if authorized_providers is None or proofs_checker is None: # pragma: no cover @@ -134,7 +134,7 @@ async def create_from_puzzle_info( puzzle_driver: PuzzleInfo, name: Optional[str] = None, # We're hinting this as Any for mypy by should explore adding this to the wallet protocol and hinting properly - potential_subclasses: Dict[AssetType, Any] = {}, + potential_subclasses: dict[AssetType, Any] = {}, ) -> Any: cr_layer: Optional[PuzzleInfo] = puzzle_driver.also() if cr_layer is None: # pragma: no cover @@ -167,7 +167,7 @@ async def create( async def convert_to_cr( cls, cat_wallet: CATWallet, - authorized_providers: List[bytes32], + authorized_providers: list[bytes32], proofs_checker: ProofsChecker, ) -> None: replace_self = cls() @@ -216,7 +216,7 @@ async def coin_added( async def add_crcat_coin(self, coin_spend: CoinSpend, coin: Coin, height: uint32) -> None: try: - new_cr_cats: List[CRCAT] = CRCAT.get_next_from_coin_spend(coin_spend) + new_cr_cats: list[CRCAT] = CRCAT.get_next_from_coin_spend(coin_spend) hint_dict = { id: hc.hint for id, hc in compute_spend_hints_and_additions(coin_spend)[0].items() @@ -316,10 +316,10 @@ async def inner_puzzle_for_cat_puzhash(self, cat_hash: bytes32) -> Program: # p "inner_puzzle_for_cat_puzhash is a legacy method and is not available on CR-CAT wallets" ) - async def get_cat_spendable_coins(self, records: Optional[Set[WalletCoinRecord]] = None) -> List[WalletCoinRecord]: - result: List[WalletCoinRecord] = [] + async def get_cat_spendable_coins(self, records: Optional[set[WalletCoinRecord]] = None) -> list[WalletCoinRecord]: + result: list[WalletCoinRecord] = [] - record_list: Set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( + record_list: set[WalletCoinRecord] = await self.wallet_state_manager.get_spendable_coins_for_wallet( self.id(), records ) @@ -330,7 +330,7 @@ async def get_cat_spendable_coins(self, records: Optional[Set[WalletCoinRecord]] return result - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: if record_list is None: record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet( self.id(), CoinType.CRCAT @@ -344,7 +344,7 @@ async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord self.log.info(f"Confirmed balance for cat wallet {self.id()} is {amount}") return uint128(amount) - async def get_pending_approval_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_pending_approval_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: if record_list is None: record_list = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet( self.id(), CoinType.CRCAT_PENDING @@ -397,12 +397,12 @@ async def get_lineage_proof_for_coin(self, coin: Coin) -> Optional[LineageProof] async def _generate_unsigned_spendbundle( self, - payments: List[Payment], + payments: list[Payment], action_scope: WalletActionScope, fee: uint64 = uint64(0), - cat_discrepancy: Optional[Tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) - coins: Optional[Set[Coin]] = None, - extra_conditions: Tuple[Condition, ...] 
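The CR-CAT balance methods above partition the wallet's coins by `CoinType`: ordinary CR-CAT coins count toward the confirmed balance, while coins parked in the pending-approval state are summed separately by `get_pending_approval_balance`. A simplified model of that partition (the enum values here are illustrative, not the real `CoinType` constants):

```python
from enum import IntEnum

class CoinType(IntEnum):
    # Illustrative values only; the real constants live with the wallet's
    # coin record model.
    CRCAT = 1
    CRCAT_PENDING = 2

def balances(records: list[tuple[CoinType, int]]) -> tuple[int, int]:
    confirmed = sum(amount for kind, amount in records if kind is CoinType.CRCAT)
    pending = sum(amount for kind, amount in records if kind is CoinType.CRCAT_PENDING)
    return confirmed, pending

assert balances([(CoinType.CRCAT, 100), (CoinType.CRCAT_PENDING, 40)]) == (100, 40)
```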
= tuple(), + cat_discrepancy: Optional[tuple[int, Program, Program]] = None, # (extra_delta, tail_reveal, tail_solution) + coins: Optional[set[Coin]] = None, + extra_conditions: tuple[Condition, ...] = tuple(), add_authorizations_to_cr_cats: bool = True, ) -> WalletSpendBundle: if cat_discrepancy is not None: @@ -438,7 +438,7 @@ async def _generate_unsigned_spendbundle( # Calculate standard puzzle solutions change = selected_cat_amount - starting_amount - primaries: List[Payment] = [] + primaries: list[Payment] = [] for payment in payments: primaries.append(payment) @@ -474,12 +474,12 @@ async def _generate_unsigned_spendbundle( # Loop through the coins we've selected and gather the information we need to spend them vc: Optional[VerifiedCredential] = None - vc_announcements_to_make: List[bytes] = [] - inner_spends: List[Tuple[CRCAT, int, Program, Program]] = [] + vc_announcements_to_make: list[bytes] = [] + inner_spends: list[tuple[CRCAT, int, Program, Program]] = [] first = True announcement: CreateCoinAnnouncement - coin_ids: List[bytes32] = [coin.name() for coin in cat_coins] - coin_records: List[WalletCoinRecord] = ( + coin_ids: list[bytes32] = [coin.name() for coin in cat_coins] + coin_records: list[WalletCoinRecord] = ( await self.wallet_state_manager.coin_store.get_coin_records(coin_id_filter=HashFilter.include(coin_ids)) ).records assert len(coin_records) == len(cat_coins) @@ -596,17 +596,17 @@ async def _generate_unsigned_spendbundle( async def generate_signed_transaction( self, - amounts: List[uint64], - puzzle_hashes: List[bytes32], + amounts: list[uint64], + puzzle_hashes: list[bytes32], action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, - memos: Optional[List[List[bytes]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + coins: Optional[set[Coin]] = None, + memos: Optional[list[list[bytes]]] = None, + extra_conditions: tuple[Condition, ...] 
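`generate_signed_transaction` above defaults missing memo lists to empty and then prepends each output's puzzle hash as its first memo, the "hint" a receiving wallet scans for to discover coins it owns. Condensed into a standalone helper (hypothetical name, same shape as the loop in the hunk):

```python
from typing import Optional

def payment_memos(
    puzzle_hashes: list[bytes],
    memos: Optional[list[list[bytes]]] = None,
) -> list[list[bytes]]:
    # Missing memo lists default to empty; each output's puzzle hash is
    # prepended as the hint, with user-supplied memos following it.
    if memos is None:
        memos = [[] for _ in range(len(puzzle_hashes))]
    return [[puzhash, *memo_list] for puzhash, memo_list in zip(puzzle_hashes, memos)]

assert payment_memos([b"ph1"], [[b"note"]]) == [[b"ph1", b"note"]]
```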
= tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: # (extra_delta, tail_reveal, tail_solution) - cat_discrepancy: Optional[Tuple[int, Program, Program]] = kwargs.get("cat_discrepancy", None) + cat_discrepancy: Optional[tuple[int, Program, Program]] = kwargs.get("cat_discrepancy", None) add_authorizations_to_cr_cats: bool = kwargs.get("add_authorizations_to_cr_cats", True) if memos is None: memos = [[] for _ in range(len(puzzle_hashes))] @@ -616,7 +616,7 @@ async def generate_signed_transaction( payments = [] for amount, puzhash, memo_list in zip(amounts, puzzle_hashes, memos): - memos_with_hint: List[bytes] = [puzhash] + memos_with_hint: list[bytes] = [puzhash] memos_with_hint.extend(memo_list) # Force wrap the outgoing coins in the pending state if not going to us payments.append( @@ -643,10 +643,10 @@ async def generate_signed_transaction( ) async with action_scope.use() as interface: - other_tx_removals: Set[Coin] = { + other_tx_removals: set[Coin] = { removal for tx in interface.side_effects.transactions for removal in tx.removals } - other_tx_additions: Set[Coin] = { + other_tx_additions: set[Coin] = { addition for tx in interface.side_effects.transactions for addition in tx.additions } tx_list = [ @@ -679,15 +679,15 @@ async def claim_pending_approval_balance( min_amount_to_claim: uint64, action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, + coins: Optional[set[Coin]] = None, min_coin_amount: Optional[uint64] = None, max_coin_amount: Optional[uint64] = None, - excluded_coin_amounts: Optional[List[uint64]] = None, + excluded_coin_amounts: Optional[list[uint64]] = None, reuse_puzhash: Optional[bool] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), ) -> None: # Select the relevant CR-CAT coins - crcat_records: Set[WalletCoinRecord] = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet( + crcat_records: set[WalletCoinRecord] = await self.wallet_state_manager.coin_store.get_unspent_coins_for_wallet( self.id(), CoinType.CRCAT_PENDING ) if coins is None: @@ -730,7 +730,7 @@ async def claim_pending_approval_balance( ).get_tree_hash() # Make CR-CAT bundle - crcats_and_puzhashes: List[Tuple[CRCAT, bytes32]] = [ + crcats_and_puzhashes: list[tuple[CRCAT, bytes32]] = [ (crcat, CRCATWallet.get_metadata_from_record(record).inner_puzzle_hash) for record in [r for r in crcat_records if r.coin in coins] for crcat in [self.coin_record_to_crcat(record)] @@ -775,8 +775,8 @@ async def claim_pending_approval_balance( ) async with action_scope.use() as interface: - other_additions: Set[Coin] = {rem for tx in interface.side_effects.transactions for rem in tx.additions} - other_removals: Set[Coin] = {rem for tx in interface.side_effects.transactions for rem in tx.removals} + other_additions: set[Coin] = {rem for tx in interface.side_effects.transactions for rem in tx.additions} + other_removals: set[Coin] = {rem for tx in interface.side_effects.transactions for rem in tx.removals} interface.side_effects.transactions.append( TransactionRecord( confirmed_at_height=uint32(0), diff --git a/chia/wallet/vc_wallet/cr_outer_puzzle.py b/chia/wallet/vc_wallet/cr_outer_puzzle.py index 183b8c790ba9..d2bbe2e0bfd2 100644 --- a/chia/wallet/vc_wallet/cr_outer_puzzle.py +++ b/chia/wallet/vc_wallet/cr_outer_puzzle.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass -from typing import Any, Callable, Dict, List, Optional, Tuple +from typing import Any, Callable, Optional from clvm_tools.binutils import disassemble @@ -23,11 +23,11 @@ class CROuterPuzzle: _get_inner_solution: Callable[[PuzzleInfo, Program], Optional[Program]] def match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]: - args: Optional[Tuple[List[bytes32], Program, Program]] = match_cr_layer(puzzle) + args: Optional[tuple[list[bytes32], Program, Program]] = match_cr_layer(puzzle) if args is None: return None authorized_providers, proofs_checker, inner_puzzle = args - constructor_dict: Dict[str, Any] = { + constructor_dict: dict[str, Any] = { "type": "credential restricted", "authorized_providers": ["0x" + ap.hex() for ap in authorized_providers], "proofs_checker": disassemble(proofs_checker), @@ -38,7 +38,7 @@ def match(self, puzzle: UncurriedPuzzle) -> Optional[PuzzleInfo]: return PuzzleInfo(constructor_dict) def get_inner_puzzle(self, constructor: PuzzleInfo, puzzle_reveal: UncurriedPuzzle) -> Optional[Program]: - args: Optional[Tuple[List[bytes32], Program, Program]] = match_cr_layer(puzzle_reveal) + args: Optional[tuple[list[bytes32], Program, Program]] = match_cr_layer(puzzle_reveal) if args is None: raise ValueError("This driver is not for the specified puzzle reveal") # pragma: no cover _, _, inner_puzzle = args diff --git a/chia/wallet/vc_wallet/vc_drivers.py b/chia/wallet/vc_wallet/vc_drivers.py index 1cff3e4edc4d..9ba68ec1e211 100644 --- a/chia/wallet/vc_wallet/vc_drivers.py +++ b/chia/wallet/vc_wallet/vc_drivers.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, replace -from typing import List, Optional, Tuple, Type, TypeVar +from typing import Optional, TypeVar from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -103,7 +103,7 @@ 
def create_covenant_layer(initial_puzzle_hash: bytes32, parent_morpher: Program, ) -def match_covenant_layer(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[bytes32, Program, Program]]: +def match_covenant_layer(uncurried_puzzle: UncurriedPuzzle) -> Optional[tuple[bytes32, Program, Program]]: if uncurried_puzzle.mod == COVENANT_LAYER: return ( bytes32(uncurried_puzzle.args.at("f").as_atom()), @@ -166,7 +166,7 @@ def create_did_tp( EML_DID_TP_FULL_HASH = create_did_tp().get_tree_hash() -def match_did_tp(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[()]]: +def match_did_tp(uncurried_puzzle: UncurriedPuzzle) -> Optional[tuple[()]]: if uncurried_puzzle.mod == EML_DID_TP: return () else: @@ -198,7 +198,7 @@ def create_viral_backdoor(hidden_puzzle_hash: bytes32, inner_puzzle_hash: bytes3 ) -def match_viral_backdoor(uncurried_puzzle: UncurriedPuzzle) -> Optional[Tuple[bytes32, bytes32]]: +def match_viral_backdoor(uncurried_puzzle: UncurriedPuzzle) -> Optional[tuple[bytes32, bytes32]]: if uncurried_puzzle.mod == VIRAL_BACKDOOR: return bytes32(uncurried_puzzle.args.at("rf").as_atom()), bytes32(uncurried_puzzle.args.at("rrf").as_atom()) else: @@ -328,14 +328,14 @@ class VerifiedCredential(Streamable): @classmethod def launch( - cls: Type[_T_VerifiedCredential], - origin_coins: List[Coin], + cls: type[_T_VerifiedCredential], + origin_coins: list[Coin], provider_id: bytes32, new_inner_puzzle_hash: bytes32, - memos: List[bytes32], + memos: list[bytes32], fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> Tuple[List[Program], List[CoinSpend], _T_VerifiedCredential]: + extra_conditions: tuple[Condition, ...] = tuple(), + ) -> tuple[list[Program], list[CoinSpend], _T_VerifiedCredential]: """ Launch a VC. @@ -414,7 +414,7 @@ def launch( ) primary_dpuz: Program = Program.to((1, create_launcher_conditions)) - additional_dpuzs: List[Program] = [Program.to((1, [[61, second_launcher_announcement_hash]]))] + additional_dpuzs: list[Program] = [Program.to((1, [[61, second_launcher_announcement_hash]]))] return ( [primary_dpuz, *additional_dpuzs], [ @@ -496,7 +496,7 @@ def hidden_puzzle(self) -> Program: #################################################################################################################### @staticmethod - def is_vc(puzzle_reveal: UncurriedPuzzle) -> Tuple[bool, str]: + def is_vc(puzzle_reveal: UncurriedPuzzle) -> tuple[bool, str]: """ This takes an (uncurried) puzzle reveal and returns a boolean for whether the puzzle is a VC and an error message for if the puzzle is a mismatch. Returns True for VC launcher spends. @@ -555,7 +555,7 @@ def is_vc(puzzle_reveal: UncurriedPuzzle) -> Tuple[bool, str]: return True, "" @classmethod - def get_next_from_coin_spend(cls: Type[_T_VerifiedCredential], parent_spend: CoinSpend) -> _T_VerifiedCredential: + def get_next_from_coin_spend(cls: type[_T_VerifiedCredential], parent_spend: CoinSpend) -> _T_VerifiedCredential: """ Given a coin spend, this will return the next VC that was create as an output of that spend. This is the main method to use when syncing. 
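`match_covenant_layer`, `match_did_tp`, and `match_viral_backdoor` above all follow one pattern: uncurry the puzzle, compare its `mod` against the known template, and return the curried arguments on success or `None` on mismatch. A stripped-down stand-in, with byte strings in place of `Program` objects:

```python
from typing import Optional

def match_template(
    uncurried_mod: bytes, curried_args: list[bytes], template: bytes
) -> Optional[list[bytes]]:
    # Compare the uncurried mod against the known template; hand back the
    # curried arguments on a match, None otherwise.
    if uncurried_mod == template:
        return curried_args
    return None

assert match_template(b"covenant", [b"arg0"], b"covenant") == [b"arg0"]
assert match_template(b"other", [b"arg0"], b"covenant") is None
```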
If a spend has been identified as having a VC puzzle reveal, running this method @@ -585,7 +585,7 @@ def get_next_from_coin_spend(cls: Type[_T_VerifiedCredential], parent_spend: Coi dpuz: Program = solution.at("rrf").at("f").at("f") dsol: Program = solution.at("rrf").at("f").at("rf") - conditions: List[Program] = list(dpuz.run(dsol).as_iter()) + conditions: list[Program] = list(dpuz.run(dsol).as_iter()) remark_condition: Program = next(c for c in conditions if c.at("f").as_int() == 1) inner_puzzle_hash = bytes32(remark_condition.at("rf").as_atom()) magic_condition: Program = next(c for c in conditions if c.at("f").as_int() == -10) @@ -707,7 +707,7 @@ def do_spend( inner_solution: Program, new_proof_hash: Optional[bytes32] = None, new_proof_provider: Optional[bytes32] = None, - ) -> Tuple[Optional[CreatePuzzleAnnouncement], CoinSpend, VerifiedCredential]: + ) -> tuple[Optional[CreatePuzzleAnnouncement], CoinSpend, VerifiedCredential]: """ Given an inner puzzle reveal and solution, spend the VC (potentially updating the proofs in the process). Note that the inner puzzle is already expected to output the 'magic' condition (which can be created above). @@ -760,7 +760,7 @@ def do_spend( def activate_backdoor( self, provider_innerpuzhash: bytes32, announcement_nonce: Optional[bytes32] = None - ) -> Tuple[CreatePuzzleAnnouncement, CoinSpend]: + ) -> tuple[CreatePuzzleAnnouncement, CoinSpend]: """ Activates the backdoor in the VC to revoke the credentials and remove the provider's DID. diff --git a/chia/wallet/vc_wallet/vc_store.py b/chia/wallet/vc_wallet/vc_store.py index d12bd3781a5f..07fc1a03b95e 100644 --- a/chia/wallet/vc_wallet/vc_store.py +++ b/chia/wallet/vc_wallet/vc_store.py @@ -2,7 +2,7 @@ import dataclasses from functools import cmp_to_key -from typing import Dict, List, Optional, Tuple, Type, TypeVar +from typing import Optional, TypeVar from aiosqlite import Row @@ -19,10 +19,10 @@ @dataclasses.dataclass(frozen=True) class VCProofs: - key_value_pairs: Dict[str, str] + key_value_pairs: dict[str, str] def as_program(self) -> Program: - def byte_sort_pairs(f1: Tuple[str, str], f2: Tuple[str, str]) -> int: + def byte_sort_pairs(f1: tuple[str, str], f2: tuple[str, str]) -> int: return 1 if Program.to([10, (1, f1[0]), (1, f2[0])]).run([]) == Program.to(None) else -1 prog: Program = Program.to( @@ -45,7 +45,7 @@ def from_program(prog: Program) -> VCProofs: first: Program = prog.at("f") rest: Program = prog.at("r") if first.atom is None and rest.atom is None: - final_dict: Dict[str, str] = {} + final_dict: dict[str, str] = {} final_dict.update(VCProofs.from_program(first).key_value_pairs) final_dict.update(VCProofs.from_program(rest).key_value_pairs) return VCProofs(final_dict) @@ -54,7 +54,7 @@ def from_program(prog: Program) -> VCProofs: else: raise ValueError("Malformatted VCProofs program") # pragma: no cover - def prove_keys(self, keys: List[str], tree: Optional[Program] = None) -> Program: + def prove_keys(self, keys: list[str], tree: Optional[Program] = None) -> Program: if tree is None: tree = self.as_program() @@ -114,7 +114,7 @@ class VCStore: db_wrapper: DBWrapper2 @classmethod - async def create(cls: Type[_T_VCStore], db_wrapper: DBWrapper2) -> _T_VCStore: + async def create(cls: type[_T_VCStore], db_wrapper: DBWrapper2) -> _T_VCStore: self = cls() self.db_wrapper = db_wrapper @@ -191,7 +191,7 @@ async def get_vc_record(self, launcher_id: bytes32) -> Optional[VCRecord]: return _row_to_vc_record(row) return None - async def get_vc_records_by_providers(self, provider_ids: 
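`VCProofs.as_program` below has to serialize its key/value pairs in a canonical order, so `byte_sort_pairs` compares keys with the CLVM `>s` operator (opcode 10) and feeds the comparator through `functools.cmp_to_key`. A pure-Python equivalent, assuming plain bytewise comparison of the encoded keys matches `>s`:

```python
from functools import cmp_to_key

def byte_sort_pairs(f1: tuple[str, str], f2: tuple[str, str]) -> int:
    # Stand-in for running CLVM `>s` on the two keys; dict keys are unique,
    # so the comparator never needs to return 0.
    return 1 if f1[0].encode() > f2[0].encode() else -1

pairs = {"foo": "1", "bar": "1", "baz": "1"}
ordered = sorted(pairs.items(), key=cmp_to_key(byte_sort_pairs))
assert [key for key, _ in ordered] == ["bar", "baz", "foo"]
```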
List[bytes32]) -> List[VCRecord]: # pragma: no cover + async def get_vc_records_by_providers(self, provider_ids: list[bytes32]) -> list[VCRecord]: # pragma: no cover """ Checks DB for VCs with a proof_provider in a specified list and returns them. """ @@ -206,7 +206,7 @@ async def get_vc_records_by_providers(self, provider_ids: List[bytes32]) -> List return [_row_to_vc_record(row) for row in rows] - async def get_unconfirmed_vcs(self) -> List[VCRecord]: + async def get_unconfirmed_vcs(self) -> list[VCRecord]: """ Returns all VCs that have not yet been marked confirmed (confirmed_height == 0) """ @@ -222,7 +222,7 @@ async def get_vc_record_list( self, start_index: int = 0, count: int = 50, - ) -> List[VCRecord]: + ) -> list[VCRecord]: """ Return all VCs :param start_index: Start index diff --git a/chia/wallet/vc_wallet/vc_wallet.py b/chia/wallet/vc_wallet/vc_wallet.py index 9216839824a5..56d77e813097 100644 --- a/chia/wallet/vc_wallet/vc_wallet.py +++ b/chia/wallet/vc_wallet/vc_wallet.py @@ -3,7 +3,7 @@ import logging import time import traceback -from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Type, TypeVar, Union +from typing import TYPE_CHECKING, Optional, TypeVar, Union from chia_rs import G1Element, G2Element from clvm.casts import int_to_bytes @@ -63,7 +63,7 @@ class VCWallet: @classmethod async def create_new_vc_wallet( - cls: Type[_T_VCWallet], + cls: type[_T_VCWallet], wallet_state_manager: WalletStateManager, wallet: Wallet, name: Optional[str] = None, @@ -80,7 +80,7 @@ async def create_new_vc_wallet( @classmethod async def create( - cls: Type[_T_VCWallet], + cls: type[_T_VCWallet], wallet_state_manager: WalletStateManager, wallet: Wallet, wallet_info: WalletInfo, @@ -110,7 +110,7 @@ async def coin_added( """ # TODO Use coin_data instead of calling peer API wallet_node = self.wallet_state_manager.wallet_node - coin_states: Optional[List[CoinState]] = await wallet_node.get_coin_state([coin.parent_coin_info], peer=peer) + coin_states: Optional[list[CoinState]] = await wallet_node.get_coin_state([coin.parent_coin_info], peer=peer) if coin_states is None: self.log.error( f"Cannot find parent coin of the verified credential coin: {coin.name().hex()}" @@ -165,7 +165,7 @@ async def launch_new_vc( action_scope: WalletActionScope, inner_puzzle_hash: Optional[bytes32] = None, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> VCRecord: """ Given the DID ID of a proof provider, mint a brand new VC with an empty slot for proofs. @@ -204,8 +204,8 @@ async def launch_new_vc( coin_spends.append(make_spend(coin, puzzle, solution)) spend_bundle = WalletSpendBundle(coin_spends, G2Element()) now = uint64(int(time.time())) - add_list: List[Coin] = list(spend_bundle.additions()) - rem_list: List[Coin] = list(spend_bundle.removals()) + add_list: list[Coin] = list(spend_bundle.additions()) + rem_list: list[Coin] = list(spend_bundle.removals()) vc_record: VCRecord = VCRecord(vc, uint32(0)) async with action_scope.use() as interface: interface.side_effects.transactions.append( @@ -238,7 +238,7 @@ async def generate_signed_transaction( action_scope: WalletActionScope, fee: uint64 = uint64(0), new_inner_puzhash: Optional[bytes32] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
= tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: new_proof_hash: Optional[bytes32] = kwargs.get( @@ -265,7 +265,7 @@ async def generate_signed_transaction( if new_inner_puzhash is None: new_inner_puzhash = inner_puzhash - primaries: List[Payment] = [Payment(new_inner_puzhash, uint64(vc_record.vc.coin.amount), [new_inner_puzhash])] + primaries: list[Payment] = [Payment(new_inner_puzhash, uint64(vc_record.vc.coin.amount), [new_inner_puzhash])] if fee > 0: coin_name = vc_record.vc.coin.name() @@ -316,8 +316,8 @@ async def generate_signed_transaction( raise ValueError( f"Cannot find the required DID {vc_record.vc.proof_provider.hex()}." ) # pragma: no cover - add_list: List[Coin] = list(spend_bundle.additions()) - rem_list: List[Coin] = list(spend_bundle.removals()) + add_list: list[Coin] = list(spend_bundle.additions()) + rem_list: list[Coin] = list(spend_bundle.removals()) now = uint64(int(time.time())) async with action_scope.use() as interface: @@ -349,9 +349,9 @@ async def revoke_vc( peer: WSChiaConnection, action_scope: WalletActionScope, fee: uint64 = uint64(0), - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] = tuple(), ) -> None: - vc_coin_states: List[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( + vc_coin_states: list[CoinState] = await self.wallet_state_manager.wallet_node.get_coin_state( [parent_id], peer=peer ) if vc_coin_states is None: @@ -377,7 +377,7 @@ async def revoke_vc( ) return - recovery_info: Optional[Tuple[bytes32, bytes32, uint64]] = await did_wallet.get_info_for_recovery() + recovery_info: Optional[tuple[bytes32, bytes32, uint64]] = await did_wallet.get_info_for_recovery() if recovery_info is None: raise RuntimeError("DID could not currently be accessed while trying to revoke VC") # pragma: no cover _, provider_inner_puzhash, _ = recovery_info @@ -387,8 +387,8 @@ async def revoke_vc( coins.add(vc.coin) if fee > 0: coins.update(await self.standard_wallet.select_coins(fee, action_scope)) - sorted_coins: List[Coin] = sorted(coins, key=Coin.name) - sorted_coin_list: List[List[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins] + sorted_coins: list[Coin] = sorted(coins, key=Coin.name) + sorted_coin_list: list[list[Union[bytes32, uint64]]] = [coin_as_list(c) for c in sorted_coins] nonce: bytes32 = SerializedProgram.to(sorted_coin_list).get_tree_hash() vc_announcement: AssertCoinAnnouncement = AssertCoinAnnouncement(asserted_id=vc.coin.name(), asserted_msg=nonce) @@ -408,7 +408,7 @@ async def revoke_vc( async def add_vc_authorization( self, offer: Offer, solver: Solver, action_scope: WalletActionScope - ) -> Tuple[Offer, Solver]: + ) -> tuple[Offer, Solver]: """ This method takes an existing offer and adds a VC authorization spend to it where it can/is willing. The only coins types that it looks for to approve are CR-CATs at the moment. @@ -422,9 +422,9 @@ async def add_vc_authorization( send the change back to it's original puzzle hash or else a taker wallet will not approve it. 
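`revoke_vc` above derives its announcement nonce deterministically: sort the coins being spent by name, serialize the set, and tree-hash the result, so the VC coin and every coin asserting the announcement agree on one message. A simplified stand-in that keeps only the determinism (sha256 over the sorted ids replaces the real CLVM tree hash of `(parent, puzzle_hash, amount)` lists):

```python
import hashlib

def announcement_nonce(coin_ids: list[bytes]) -> bytes:
    # Sorting first makes the nonce independent of iteration order.
    return hashlib.sha256(b"".join(sorted(coin_ids))).digest()

assert announcement_nonce([b"b" * 32, b"a" * 32]) == announcement_nonce([b"a" * 32, b"b" * 32])
```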
""" # Gather all of the CRCATs being spent and the CRCATs that each creates - crcat_spends: List[CRCATSpend] = [] - other_spends: List[CoinSpend] = [] - spends_to_fix: Dict[bytes32, CoinSpend] = {} + crcat_spends: list[CRCATSpend] = [] + other_spends: list[CoinSpend] = [] + spends_to_fix: dict[bytes32, CoinSpend] = {} for spend in offer.to_valid_spend().coin_spends: if CRCAT.is_cr_cat(uncurry_puzzle(spend.puzzle_reveal))[0]: crcat_spend: CRCATSpend = CRCATSpend.from_coin_spend(spend) @@ -440,13 +440,13 @@ async def add_vc_authorization( other_spends.append(spend) # Figure out what VC announcements are needed - announcements_to_make: Dict[bytes32, List[CreatePuzzleAnnouncement]] = {} - announcements_to_assert: Dict[bytes32, List[AssertCoinAnnouncement]] = {} - vcs: Dict[bytes32, VerifiedCredential] = {} - coin_args: Dict[str, List[str]] = {} + announcements_to_make: dict[bytes32, list[CreatePuzzleAnnouncement]] = {} + announcements_to_assert: dict[bytes32, list[AssertCoinAnnouncement]] = {} + vcs: dict[bytes32, VerifiedCredential] = {} + coin_args: dict[str, list[str]] = {} for crcat_spend in crcat_spends: # Check first whether we can approve... - available_vcs: List[VCRecord] = [ + available_vcs: list[VCRecord] = [ vc_rec for vc_rec in await self.store.get_vc_records_by_providers(crcat_spend.crcat.authorized_providers) if vc_rec.confirmed_at_height != 0 @@ -572,9 +572,9 @@ async def add_vc_authorization( ), Solver({"vc_authorizations": coin_args}) async def get_vc_with_provider_in_and_proofs( - self, authorized_providers: List[bytes32], proofs: List[str] + self, authorized_providers: list[bytes32], proofs: list[str] ) -> VerifiedCredential: - vc_records: List[VCRecord] = await self.store.get_vc_records_by_providers(authorized_providers) + vc_records: list[VCRecord] = await self.store.get_vc_records_by_providers(authorized_providers) if len(vc_records) == 0: # pragma: no cover raise ValueError(f"VCWallet has no VCs with providers in the following list: {authorized_providers}") else: @@ -588,7 +588,7 @@ async def get_vc_with_provider_in_and_proofs( return rec.vc raise ValueError(f"No authorized VC has the correct proofs: {proofs}") # pragma: no cover - async def proof_of_inclusions_for_root_and_keys(self, root: bytes32, keys: List[str]) -> Program: + async def proof_of_inclusions_for_root_and_keys(self, root: bytes32, keys: list[str]) -> Program: vc_proofs: Optional[VCProofs] = await self.store.get_proofs_for_root(root) if vc_proofs is None: raise RuntimeError(f"No proofs exist for VC root: {root.hex()}") # pragma: no cover @@ -599,25 +599,25 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: raise RuntimeError("VCWallet does not support select_coins()") # pragma: no cover - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: """The VC wallet doesn't really have a balance.""" return uint128(0) # pragma: no cover - async def get_unconfirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: """The VC wallet doesn't really have a balance.""" return uint128(0) # pragma: no cover - async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, unspent_records: 
Optional[set[WalletCoinRecord]] = None) -> uint128: """The VC wallet doesn't really have a balance.""" return uint128(0) # pragma: no cover async def get_pending_change_balance(self) -> uint64: return uint64(0) # pragma: no cover - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: """This is the confirmed balance, which we set to 0 as the VC wallet doesn't have one.""" return uint128(0) # pragma: no cover diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py index 3fd4580b7188..58ebbeb67885 100644 --- a/chia/wallet/wallet.py +++ b/chia/wallet/wallet.py @@ -2,7 +2,7 @@ import logging import time -from typing import TYPE_CHECKING, Any, ClassVar, Dict, List, Optional, Set, Tuple, cast +from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey from typing_extensions import Unpack @@ -94,14 +94,14 @@ def max_send_quantity(self) -> int: # avoid full block TXs return int(self.wallet_state_manager.constants.MAX_BLOCK_COST_CLVM / 5 / self.cost_of_single_tx) - async def get_max_spendable_coins(self, records: Optional[Set[WalletCoinRecord]] = None) -> Set[WalletCoinRecord]: - spendable: List[WalletCoinRecord] = list( + async def get_max_spendable_coins(self, records: Optional[set[WalletCoinRecord]] = None) -> set[WalletCoinRecord]: + spendable: list[WalletCoinRecord] = list( await self.wallet_state_manager.get_spendable_coins_for_wallet(self.id(), records) ) spendable.sort(reverse=True, key=lambda record: record.coin.amount) return set(spendable[0 : min(len(spendable), self.max_send_quantity)]) - async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: return uint128(sum(cr.coin.amount for cr in await self.get_max_spendable_coins())) @classmethod @@ -111,20 +111,20 @@ def type(cls) -> WalletType: def id(self) -> uint32: return self.wallet_id - async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: return await self.wallet_state_manager.get_confirmed_balance_for_wallet(self.id(), record_list) - async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_unconfirmed_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: return await self.wallet_state_manager.get_unconfirmed_balance(self.id(), unspent_records) - async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: + async def get_spendable_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: spendable = await self.wallet_state_manager.get_confirmed_spendable_balance_for_wallet( self.id(), unspent_records ) return spendable async def get_pending_change_balance(self) -> uint64: - unconfirmed_tx: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet( + unconfirmed_tx: list[TransactionRecord] = await self.wallet_state_manager.tx_store.get_unconfirmed_for_wallet( self.id() ) addition_amount = 0 @@ -206,12 +206,12 @@ async def get_new_puzzlehash(self) -> bytes32: def make_solution( self, - primaries: List[Payment], - conditions: 
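`get_max_spendable_coins` above caps a transaction at `max_send_quantity` coins by taking the largest ones first, and `get_max_send_amount` is simply the sum over that selection. Reduced to plain integers:

```python
def max_spendable(amounts: list[int], max_send_quantity: int) -> list[int]:
    # Largest coins first, capped at the per-transaction quantity limit
    # that keeps a single spend bundle under the block cost ceiling.
    return sorted(amounts, reverse=True)[:max_send_quantity]

# get_max_send_amount is then just the sum over this selection:
assert sum(max_spendable([1, 5, 3, 9], max_send_quantity=2)) == 14
```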
Tuple[Condition, ...] = tuple(), + primaries: list[Payment], + conditions: tuple[Condition, ...] = tuple(), fee: uint64 = uint64(0), ) -> Program: assert fee >= 0 - condition_list: List[Any] = [condition.to_program() for condition in conditions] + condition_list: list[Any] = [condition.to_program() for condition in conditions] if len(primaries) > 0: for primary in primaries: condition_list.append(make_create_coin_condition(primary.puzzle_hash, primary.amount, primary.memos)) @@ -229,17 +229,17 @@ async def select_coins( self, amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: """ Returns a set of coins that can be used for generating a new transaction. Note: Must be called under wallet state manager lock """ spendable_amount: uint128 = await self.get_spendable_balance() - spendable_coins: List[WalletCoinRecord] = list(await self.get_max_spendable_coins()) + spendable_coins: list[WalletCoinRecord] = list(await self.get_max_spendable_coins()) # Try to use coins from the store, if there isn't enough of "unused" # coins use change coins that are not confirmed yet - unconfirmed_removals: Dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( + unconfirmed_removals: dict[bytes32, Coin] = await self.wallet_state_manager.unconfirmed_removals_for_wallet( self.id() ) async with action_scope.use() as interface: @@ -262,13 +262,13 @@ async def _generate_unsigned_transaction( action_scope: WalletActionScope, fee: uint64 = uint64(0), origin_id: Optional[bytes32] = None, - coins: Optional[Set[Coin]] = None, - primaries_input: Optional[List[Payment]] = None, - memos: Optional[List[bytes]] = None, + coins: Optional[set[Coin]] = None, + primaries_input: Optional[list[Payment]] = None, + memos: Optional[list[bytes]] = None, negative_change_allowed: bool = False, - puzzle_decorator_override: Optional[List[Dict[str, Any]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), - ) -> List[CoinSpend]: + puzzle_decorator_override: Optional[list[dict[str, Any]]] = None, + extra_conditions: tuple[Condition, ...] 
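`make_solution` above assembles the inner solution as a plain list of CLVM conditions: one `CREATE_COIN` per primary payment plus, presumably, a fee reservation when `fee > 0` (that branch is elided in the hunk). A sketch using the raw opcodes (`CREATE_COIN` is 51, `RESERVE_FEE` is 52):

```python
CREATE_COIN = 51
RESERVE_FEE = 52

def solution_conditions(primaries: list[tuple[bytes, int]], fee: int = 0) -> list[list[object]]:
    assert fee >= 0
    conditions: list[list[object]] = []
    for puzzle_hash, amount in primaries:
        conditions.append([CREATE_COIN, puzzle_hash, amount])
    if fee > 0:  # assumed; the fee handling is not shown in this hunk
        conditions.append([RESERVE_FEE, fee])
    return conditions

assert solution_conditions([(b"ph", 1000)], fee=10)[-1] == [RESERVE_FEE, 10]
```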
= tuple(), + ) -> list[CoinSpend]: """ Generates a unsigned transaction in form of List(Puzzle, Solutions) Note: this must be called under a wallet state manager lock @@ -303,7 +303,7 @@ async def _generate_unsigned_transaction( assert change >= 0 - spends: List[CoinSpend] = [] + spends: list[CoinSpend] = [] primary_announcement: Optional[AssertCoinAnnouncement] = None # Check for duplicates @@ -318,7 +318,7 @@ async def _generate_unsigned_transaction( decorated_target_puzzle_hash = decorator_manager.decorate_target_puzzle_hash( inner_puzzle, newpuzzlehash ) - target_primary: List[Payment] = [] + target_primary: list[Payment] = [] if memos is None: memos = [] memos = decorator_manager.decorate_memos(inner_puzzle, newpuzzlehash, memos) @@ -337,7 +337,7 @@ async def _generate_unsigned_transaction( else: change_puzzle_hash = await self.get_new_puzzlehash() primaries.append(Payment(change_puzzle_hash, uint64(change))) - message_list: List[bytes32] = [c.name() for c in coins] + message_list: list[bytes32] = [c.name() for c in coins] for primary in primaries: message_list.append(Coin(coin.name(), primary.puzzle_hash, primary.amount).name()) message: bytes32 = std_hash(b"".join(message_list)) @@ -375,7 +375,7 @@ async def _generate_unsigned_transaction( self.log.debug(f"Spends is {spends}") return spends - async def sign_message(self, message: str, puzzle_hash: bytes32, mode: SigningMode) -> Tuple[G1Element, G2Element]: + async def sign_message(self, message: str, puzzle_hash: bytes32, mode: SigningMode) -> tuple[G1Element, G2Element]: # CHIP-0002 message signing as documented at: # https://github.com/Chia-Network/chips/blob/80e4611fe52b174bf1a0382b9dff73805b18b8c6/CHIPs/chip-0002.md#signmessage private = await self.wallet_state_manager.get_private_key(puzzle_hash) @@ -397,11 +397,11 @@ async def generate_signed_transaction( puzzle_hash: bytes32, action_scope: WalletActionScope, fee: uint64 = uint64(0), - coins: Optional[Set[Coin]] = None, - primaries: Optional[List[Payment]] = None, - memos: Optional[List[bytes]] = None, - puzzle_decorator_override: Optional[List[Dict[str, Any]]] = None, - extra_conditions: Tuple[Condition, ...] = tuple(), + coins: Optional[set[Coin]] = None, + primaries: Optional[list[Payment]] = None, + memos: Optional[list[bytes]] = None, + puzzle_decorator_override: Optional[list[dict[str, Any]]] = None, + extra_conditions: tuple[Condition, ...] = tuple(), **kwargs: Unpack[GSTOptionalArgs], ) -> None: origin_id: Optional[bytes32] = kwargs.get("origin_id", None) @@ -434,8 +434,8 @@ async def generate_signed_transaction( spend_bundle = WalletSpendBundle(transaction, G2Element()) now = uint64(int(time.time())) - add_list: List[Coin] = list(spend_bundle.additions()) - rem_list: List[Coin] = list(spend_bundle.removals()) + add_list: list[Coin] = list(spend_bundle.additions()) + rem_list: list[Coin] = list(spend_bundle.removals()) output_amount = sum(a.amount for a in add_list) + fee input_amount = sum(r.amount for r in rem_list) @@ -471,7 +471,7 @@ async def create_tandem_xch_tx( self, fee: uint64, action_scope: WalletActionScope, - extra_conditions: Tuple[Condition, ...] = tuple(), + extra_conditions: tuple[Condition, ...] 
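The announcement message built in `_generate_unsigned_transaction` above commits to the whole transaction: the names of every coin being spent and every coin being created are concatenated and hashed once (`std_hash` is sha256), and, roughly, one coin announces that message while the remaining inputs assert it, tying the spends together. The core of it:

```python
import hashlib

def binding_message(input_coin_ids: list[bytes], output_coin_ids: list[bytes]) -> bytes:
    # One hash over every input and output name; no coin of the
    # transaction can then be spent in isolation.
    return hashlib.sha256(b"".join([*input_coin_ids, *output_coin_ids])).digest()

msg = binding_message([b"\x01" * 32], [b"\x02" * 32])
assert len(msg) == 32
```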
= tuple(), ) -> None: chia_coins = await self.select_coins(fee, action_scope) await self.generate_signed_transaction( @@ -488,7 +488,7 @@ async def get_coins_to_offer( asset_id: Optional[bytes32], amount: uint64, action_scope: WalletActionScope, - ) -> Set[Coin]: + ) -> set[Coin]: if asset_id is not None: raise ValueError(f"The standard wallet cannot offer coins with asset id {asset_id}") balance = await self.get_spendable_balance() @@ -557,18 +557,18 @@ async def path_hint_for_pubkey(self, pk: bytes) -> Optional[PathHint]: async def execute_signing_instructions( self, signing_instructions: SigningInstructions, partial_allowed: bool = False - ) -> List[SigningResponse]: + ) -> list[SigningResponse]: root_pubkey: G1Element = self.wallet_state_manager.root_pubkey - pk_lookup: Dict[int, G1Element] = ( + pk_lookup: dict[int, G1Element] = ( {root_pubkey.get_fingerprint(): root_pubkey} if self.wallet_state_manager.private_key is not None else {} ) - sk_lookup: Dict[int, PrivateKey] = ( + sk_lookup: dict[int, PrivateKey] = ( {root_pubkey.get_fingerprint(): self.wallet_state_manager.get_master_private_key()} if self.wallet_state_manager.private_key is not None else {} ) aggregate_responses_at_end: bool = True - responses: List[SigningResponse] = [] + responses: list[SigningResponse] = [] # TODO: expand path hints and sum hints recursively (a sum hint can give a new key to path hint) # Next, expand our pubkey set with path hints @@ -593,9 +593,9 @@ async def execute_signing_instructions( sk_lookup[derive_child_pk_unhardened.get_fingerprint()] = derive_child_sk_unhardened # Next, expand our pubkey set with sum hints - sum_hint_lookup: Dict[int, List[int]] = {} + sum_hint_lookup: dict[int, list[int]] = {} for sum_hint in signing_instructions.key_hints.sum_hints: - fingerprints_we_have: List[int] = [] + fingerprints_we_have: list[int] = [] for fingerprint in sum_hint.fingerprints: fingerprint_as_int = int.from_bytes(fingerprint, "big") if fingerprint_as_int not in pk_lookup: @@ -635,7 +635,7 @@ async def execute_signing_instructions( ) ) else: # Implicit if pk_fingerprint in sum_hint_lookup - signatures: List[G2Element] = [] + signatures: list[G2Element] = [] for partial_fingerprint in sum_hint_lookup[pk_fingerprint]: signatures.append( AugSchemeMPL.sign(sk_lookup[partial_fingerprint], target.message, pk_lookup[pk_fingerprint]) @@ -660,8 +660,8 @@ async def execute_signing_instructions( # If we have the full set of signing responses for the instructions, aggregate them as much as possible if aggregate_responses_at_end: - new_responses: List[SigningResponse] = [] - grouped_responses: Dict[bytes32, List[SigningResponse]] = {} + new_responses: list[SigningResponse] = [] + grouped_responses: dict[bytes32, list[SigningResponse]] = {} for response in responses: grouped_responses.setdefault(response.hook, []) grouped_responses[response.hook].append(response) @@ -677,7 +677,7 @@ async def execute_signing_instructions( return responses async def apply_signatures( - self, spends: List[Spend], signing_responses: List[SigningResponse] + self, spends: list[Spend], signing_responses: list[SigningResponse] ) -> SignedTransaction: signing_responses_set = set(signing_responses) return SignedTransaction( diff --git a/chia/wallet/wallet_action_scope.py b/chia/wallet/wallet_action_scope.py index d728e7bc52e4..ded3b3ae8bc4 100644 --- a/chia/wallet/wallet_action_scope.py +++ b/chia/wallet/wallet_action_scope.py @@ -1,8 +1,9 @@ from __future__ import annotations import contextlib +from collections.abc import AsyncIterator 
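`execute_signing_instructions` above finishes by grouping its `SigningResponse`s by `hook` and collapsing each group into a single response via BLS aggregation. The grouping logic, with a placeholder standing in for the real `AugSchemeMPL.aggregate` over `G2Element`s:

```python
from collections import defaultdict

def aggregate(signatures: list[bytes]) -> bytes:
    # Placeholder: the real code BLS-aggregates G2Elements.
    return b"".join(signatures)

def aggregate_responses(responses: list[tuple[bytes, bytes]]) -> list[tuple[bytes, bytes]]:
    grouped: dict[bytes, list[bytes]] = defaultdict(list)
    for hook, signature in responses:
        grouped[hook].append(signature)
    return [(hook, aggregate(sigs)) for hook, sigs in grouped.items()]

assert aggregate_responses([(b"h", b"s1"), (b"h", b"s2")]) == [(b"h", b"s1s2")]
```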
from dataclasses import dataclass, field, replace -from typing import TYPE_CHECKING, AsyncIterator, List, Optional, cast, final +from typing import TYPE_CHECKING, Optional, cast, final from chia.types.blockchain_format.coin import Coin from chia.util.action_scope import ActionScope @@ -20,18 +21,18 @@ @streamable @dataclass(frozen=True) class _StreamableWalletSideEffects(Streamable): - transactions: List[TransactionRecord] - signing_responses: List[SigningResponse] - extra_spends: List[WalletSpendBundle] - selected_coins: List[Coin] + transactions: list[TransactionRecord] + signing_responses: list[SigningResponse] + extra_spends: list[WalletSpendBundle] + selected_coins: list[Coin] @dataclass class WalletSideEffects: - transactions: List[TransactionRecord] = field(default_factory=list) - signing_responses: List[SigningResponse] = field(default_factory=list) - extra_spends: List[WalletSpendBundle] = field(default_factory=list) - selected_coins: List[Coin] = field(default_factory=list) + transactions: list[TransactionRecord] = field(default_factory=list) + signing_responses: list[SigningResponse] = field(default_factory=list) + extra_spends: list[WalletSpendBundle] = field(default_factory=list) + selected_coins: list[Coin] = field(default_factory=list) def __bytes__(self) -> bytes: return bytes(_StreamableWalletSideEffects(**self.__dict__)) @@ -47,8 +48,8 @@ class WalletActionConfig: push: bool merge_spends: bool sign: Optional[bool] - additional_signing_responses: List[SigningResponse] - extra_spends: List[WalletSpendBundle] + additional_signing_responses: list[SigningResponse] + extra_spends: list[WalletSpendBundle] tx_config: TXConfig def adjust_for_side_effects(self, side_effects: WalletSideEffects) -> WalletActionConfig: @@ -71,8 +72,8 @@ async def new_wallet_action_scope( push: bool = False, merge_spends: bool = True, sign: Optional[bool] = None, - additional_signing_responses: List[SigningResponse] = [], - extra_spends: List[WalletSpendBundle] = [], + additional_signing_responses: list[SigningResponse] = [], + extra_spends: list[WalletSpendBundle] = [], ) -> AsyncIterator[WalletActionScope]: async with ActionScope.new_scope( WalletSideEffects, diff --git a/chia/wallet/wallet_blockchain.py b/chia/wallet/wallet_blockchain.py index 028ad71e964e..24e7036c8596 100644 --- a/chia/wallet/wallet_blockchain.py +++ b/chia/wallet/wallet_blockchain.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, ClassVar, Dict, List, Optional, Tuple, cast +from typing import TYPE_CHECKING, ClassVar, Optional, cast from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord @@ -35,8 +35,8 @@ class WalletBlockchain: _finished_sync_up_to: uint32 _peak: Optional[HeaderBlock] - _height_to_hash: Dict[uint32, bytes32] - _block_records: Dict[bytes32, BlockRecord] + _height_to_hash: dict[uint32, bytes32] + _block_records: dict[bytes32, BlockRecord] _latest_timestamp: uint64 _sub_slot_iters: uint64 _difficulty: uint64 @@ -69,7 +69,7 @@ async def create(_basic_store: KeyValStore, constants: ConsensusConstants) -> Wa return self - async def new_valid_weight_proof(self, weight_proof: WeightProof, records: List[BlockRecord]) -> None: + async def new_valid_weight_proof(self, weight_proof: WeightProof, records: list[BlockRecord]) -> None: peak: Optional[HeaderBlock] = await self.get_peak_block() if peak is not None and weight_proof.recent_chain_data[-1].weight <= peak.weight: @@ -93,7 +93,7 @@ async 
def new_valid_weight_proof(self, weight_proof: WeightProof, records: List[ await self.set_peak_block(weight_proof.recent_chain_data[-1], latest_timestamp) await self.clean_block_records() - async def add_block(self, block: HeaderBlock) -> Tuple[AddBlockResult, Optional[Err]]: + async def add_block(self, block: HeaderBlock) -> tuple[AddBlockResult, Optional[Err]]: if self.contains_block(block.header_hash): return AddBlockResult.ALREADY_HAVE_BLOCK, None if not self.contains_block(block.prev_header_hash) and block.height > 0: @@ -217,7 +217,7 @@ async def get_block_record_from_db(self, header_hash: bytes32) -> Optional[Block # blockchain_interface return self._block_records.get(header_hash) - async def prev_block_hash(self, header_hashes: List[bytes32]) -> List[bytes32]: + async def prev_block_hash(self, header_hashes: list[bytes32]) -> list[bytes32]: ret = [] for h in header_hashes: ret.append(self._block_records[h].prev_hash) @@ -235,7 +235,7 @@ async def clean_block_records(self) -> None: if len(self._block_records) < self.CACHE_SIZE: return None - to_remove: List[bytes32] = [] + to_remove: list[bytes32] = [] for header_hash, block_record in self._block_records.items(): if block_record.height < height_limit: to_remove.append(header_hash) diff --git a/chia/wallet/wallet_coin_record.py b/chia/wallet/wallet_coin_record.py index 581d5246de97..5c554547397c 100644 --- a/chia/wallet/wallet_coin_record.py +++ b/chia/wallet/wallet_coin_record.py @@ -1,7 +1,7 @@ from __future__ import annotations from dataclasses import dataclass, field -from typing import Any, Dict, Optional, Union +from typing import Any, Optional, Union from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -56,7 +56,7 @@ def name(self) -> bytes32: def to_coin_record(self, timestamp: uint64) -> CoinRecord: return CoinRecord(self.coin, self.confirmed_block_height, self.spent_block_height, self.coinbase, timestamp) - def to_json_dict_parsed_metadata(self) -> Dict[str, Any]: + def to_json_dict_parsed_metadata(self) -> dict[str, Any]: # TODO: Merge wallet_type and wallet_id into `wallet_identifier`, make `spent` an attribute based # on `spent_height` make `WalletCoinRecord` streamable and use Streamable.to_json_dict as base here if we have # streamable enums. 
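`clean_block_records` above keeps the in-memory block cache bounded: nothing is evicted until the cache reaches `CACHE_SIZE`, after which records below a height limit are dropped. A sketch assuming the limit is derived from the peak height (the hunk elides that computation):

```python
def clean_block_cache(heights: dict[bytes, int], cache_size: int, peak_height: int) -> None:
    """Evict cached records once the cache reaches cache_size, dropping
    everything below the height limit, in place."""
    if len(heights) < cache_size:
        return
    height_limit = max(0, peak_height - cache_size)  # assumed derivation
    to_remove = [h for h, height in heights.items() if height < height_limit]
    for h in to_remove:
        del heights[h]

cache = {b"a": 1, b"b": 999}
clean_block_cache(cache, cache_size=2, peak_height=1000)
assert cache == {b"b": 999}
```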
diff --git a/chia/wallet/wallet_coin_store.py b/chia/wallet/wallet_coin_store.py index 6d113dfc613f..55cab8906749 100644 --- a/chia/wallet/wallet_coin_store.py +++ b/chia/wallet/wallet_coin_store.py @@ -3,7 +3,7 @@ import sqlite3 from dataclasses import dataclass from enum import IntEnum -from typing import Dict, List, Optional, Set +from typing import Optional from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 @@ -47,8 +47,8 @@ class GetCoinRecords(Streamable): @dataclass(frozen=True) class GetCoinRecordsResult: - records: List[WalletCoinRecord] - coin_id_to_record: Dict[bytes32, WalletCoinRecord] + records: list[WalletCoinRecord] + coin_id_to_record: dict[bytes32, WalletCoinRecord] total_count: Optional[uint32] @@ -252,8 +252,8 @@ async def get_coin_records( total_count = uint32(row[0]) self.total_count_cache.put(cache_hash, total_count) - records: List[WalletCoinRecord] = [] - coin_id_to_record: Dict[bytes32, WalletCoinRecord] = {} + records: list[WalletCoinRecord] = [] + coin_id_to_record: dict[bytes32, WalletCoinRecord] = {} for row in rows: records.append(self.coin_record_from_row(row)) coin_id_to_record[bytes32.fromhex(row[0])] = records[-1] @@ -266,7 +266,7 @@ async def get_coin_records( async def get_coin_records_between( self, wallet_id: int, start: int, end: int, reverse: bool = False, coin_type: CoinType = CoinType.NORMAL - ) -> List[WalletCoinRecord]: + ) -> list[WalletCoinRecord]: """Return a list of coins between start and end index. List is in reverse chronological order. start = 0 is most recent transaction """ @@ -293,7 +293,7 @@ async def get_first_coin_height(self) -> Optional[uint32]: async def get_unspent_coins_for_wallet( self, wallet_id: int, coin_type: CoinType = CoinType.NORMAL - ) -> Set[WalletCoinRecord]: + ) -> set[WalletCoinRecord]: """Returns set of CoinRecords that have not been spent yet for a wallet.""" async with self.db_wrapper.reader_no_transaction() as conn: rows = await conn.execute_fetchall( @@ -302,7 +302,7 @@ async def get_unspent_coins_for_wallet( ) return {self.coin_record_from_row(row) for row in rows} - async def get_all_unspent_coins(self, coin_type: CoinType = CoinType.NORMAL) -> Set[WalletCoinRecord]: + async def get_all_unspent_coins(self, coin_type: CoinType = CoinType.NORMAL) -> set[WalletCoinRecord]: """Returns set of CoinRecords that have not been spent yet for a wallet.""" async with self.db_wrapper.reader_no_transaction() as conn: rows = await conn.execute_fetchall( @@ -311,7 +311,7 @@ async def get_all_unspent_coins(self, coin_type: CoinType = CoinType.NORMAL) -> return {self.coin_record_from_row(row) for row in rows} # Checks DB and DiffStores for CoinRecords with puzzle_hash and returns them - async def get_coin_records_by_puzzle_hash(self, puzzle_hash: bytes32) -> List[WalletCoinRecord]: + async def get_coin_records_by_puzzle_hash(self, puzzle_hash: bytes32) -> list[WalletCoinRecord]: """Returns a list of all coin records with the given puzzle hash""" async with self.db_wrapper.reader_no_transaction() as conn: rows = await conn.execute_fetchall("SELECT * from coin_record WHERE puzzle_hash=?", (puzzle_hash.hex(),)) @@ -319,7 +319,7 @@ async def get_coin_records_by_puzzle_hash(self, puzzle_hash: bytes32) -> List[Wa return [self.coin_record_from_row(row) for row in rows] # Checks DB and DiffStores for CoinRecords with parent_coin_info and returns them - async def get_coin_records_by_parent_id(self, parent_coin_info: bytes32) -> List[WalletCoinRecord]: + async def 
get_coin_records_by_parent_id(self, parent_coin_info: bytes32) -> list[WalletCoinRecord]: """Returns a list of all coin records with the given parent id""" async with self.db_wrapper.reader_no_transaction() as conn: rows = await conn.execute_fetchall( diff --git a/chia/wallet/wallet_info.py b/chia/wallet/wallet_info.py index 7cb1fb9f424e..59cd5c291b89 100644 --- a/chia/wallet/wallet_info.py +++ b/chia/wallet/wallet_info.py @@ -1,7 +1,6 @@ from __future__ import annotations from dataclasses import dataclass -from typing import List from chia.util.ints import uint8, uint32 from chia.util.streamable import Streamable, streamable @@ -33,4 +32,4 @@ class WalletInfoBackup(Streamable): Used for transforming list of WalletInfo objects into bytes. """ - wallet_list: List[WalletInfo] + wallet_list: list[WalletInfo] diff --git a/chia/wallet/wallet_interested_store.py b/chia/wallet/wallet_interested_store.py index a52cdcfd28e5..e9eb14bc21c6 100644 --- a/chia/wallet/wallet_interested_store.py +++ b/chia/wallet/wallet_interested_store.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional from chia.protocols.wallet_protocol import CoinState from chia.types.blockchain_format.sized_bytes import bytes32 @@ -38,7 +38,7 @@ async def create(cls, wrapper: DBWrapper2): return self - async def get_interested_coin_ids(self) -> List[bytes32]: + async def get_interested_coin_ids(self) -> list[bytes32]: async with self.db_wrapper.writer_maybe_transaction() as conn: cursor = await conn.execute("SELECT coin_name FROM interested_coins") rows_hex = await cursor.fetchall() @@ -54,7 +54,7 @@ async def remove_interested_coin_id(self, coin_id: bytes32) -> None: cursor = await conn.execute("DELETE FROM interested_coins WHERE coin_name=?", (coin_id.hex(),)) await cursor.close() - async def get_interested_puzzle_hashes(self) -> List[Tuple[bytes32, int]]: + async def get_interested_puzzle_hashes(self) -> list[tuple[bytes32, int]]: async with self.db_wrapper.reader_no_transaction() as conn: cursor = await conn.execute("SELECT puzzle_hash, wallet_id FROM interested_puzzle_hashes") rows_hex = await cursor.fetchall() @@ -111,7 +111,7 @@ async def add_unacknowledged_token( ) await cursor.close() - async def get_unacknowledged_tokens(self) -> List: + async def get_unacknowledged_tokens(self) -> list: """ Get a list of all unacknowledged CATs :return: A json style list of unacknowledged CATs @@ -147,7 +147,7 @@ async def add_unacknowledged_coin_state( ) await cursor.close() - async def get_unacknowledged_states_for_asset_id(self, asset_id: bytes32) -> List[Tuple[CoinState, uint32]]: + async def get_unacknowledged_states_for_asset_id(self, asset_id: bytes32) -> list[tuple[CoinState, uint32]]: """ Return all states for a particular asset ID that were ignored :param asset_id: CAT asset ID diff --git a/chia/wallet/wallet_nft_store.py b/chia/wallet/wallet_nft_store.py index eeb99384bb21..1d851b99648b 100644 --- a/chia/wallet/wallet_nft_store.py +++ b/chia/wallet/wallet_nft_store.py @@ -3,7 +3,7 @@ import json import logging from sqlite3 import Row -from typing import List, Optional, Type, TypeVar, Union +from typing import Optional, TypeVar, Union from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program @@ -41,7 +41,7 @@ class WalletNftStore: db_wrapper: DBWrapper2 @classmethod - async def create(cls: Type[_T_WalletNftStore], db_wrapper: DBWrapper2) -> _T_WalletNftStore: + async def create(cls: type[_T_WalletNftStore], 
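`WalletNftStore.count` and `is_empty` above grow the SQL string and its parameter list in lockstep, so the placeholders and bound values cannot drift apart. The same pattern as a runnable `sqlite3` helper (table and column names as in the hunk; the `did_id` branch is assumed symmetric with the `wallet_id` one):

```python
import sqlite3
from typing import Optional

def count_nfts(
    conn: sqlite3.Connection,
    wallet_id: Optional[int] = None,
    did_id: Optional[bytes] = None,
) -> int:
    sql = "SELECT COUNT(nft_id) FROM users_nfts WHERE removed_height IS NULL"
    params: list[object] = []
    if wallet_id is not None:
        sql += " AND wallet_id=?"
        params.append(wallet_id)
    if did_id is not None:  # assumed symmetric with the wallet_id branch
        sql += " AND did_id=?"
        params.append(did_id.hex())
    (count,) = conn.execute(sql, params).fetchone()
    return int(count)
```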
         self = cls()
         self.db_wrapper = db_wrapper
         async with self.db_wrapper.writer_maybe_transaction() as conn:
@@ -144,7 +144,7 @@ async def save_nft(self, wallet_id: uint32, did_id: Optional[bytes32], nft_coin_
 
     async def count(self, wallet_id: Optional[uint32] = None, did_id: Optional[bytes32] = None) -> int:
         sql = "SELECT COUNT(nft_id) FROM users_nfts WHERE removed_height is NULL"
-        params: List[Union[uint32, bytes32]] = []
+        params: list[Union[uint32, bytes32]] = []
         if wallet_id is not None:
             sql += " AND wallet_id=?"
             params.append(wallet_id)
@@ -159,7 +159,7 @@ async def count(self, wallet_id: Optional[uint32] = None, did_id: Optional[bytes
 
     async def is_empty(self, wallet_id: Optional[uint32] = None) -> bool:
         sql = "SELECT 1 FROM users_nfts WHERE removed_height is NULL"
-        params: List[Union[uint32, bytes32]] = []
+        params: list[Union[uint32, bytes32]] = []
         if wallet_id is not None:
             sql += " AND wallet_id=?"
             params.append(wallet_id)
@@ -176,7 +176,7 @@ async def get_nft_list(
         did_id: Optional[bytes32] = None,
         start_index: int = 0,
         count: int = 50,
-    ) -> List[NFTCoinInfo]:
+    ) -> list[NFTCoinInfo]:
         try:
             start_index = int(start_index)
         except ValueError:
@@ -240,7 +240,7 @@ async def get_nft_by_coin_id(self, nft_coin_id: bytes32) -> Optional[NFTCoinInfo
     async def get_nft_by_id(self, nft_id: bytes32, wallet_id: Optional[uint32] = None) -> Optional[NFTCoinInfo]:
         async with self.db_wrapper.reader_no_transaction() as conn:
             sql = f"SELECT {NFT_COIN_INFO_COLUMNS} from users_nfts WHERE removed_height is NULL and nft_id=?"
-            params: List[Union[uint32, str]] = [nft_id.hex()]
+            params: list[Union[uint32, str]] = [nft_id.hex()]
             if wallet_id:
                 sql += " and wallet_id=?"
                 params.append(wallet_id)
diff --git a/chia/wallet/wallet_node.py b/chia/wallet/wallet_node.py
index cbbb898e4125..6a220e009e06 100644
--- a/chia/wallet/wallet_node.py
+++ b/chia/wallet/wallet_node.py
@@ -9,22 +9,9 @@
 import sys
 import time
 import traceback
+from collections.abc import AsyncIterator
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncIterator,
-    ClassVar,
-    Dict,
-    List,
-    Literal,
-    Optional,
-    Set,
-    Tuple,
-    Union,
-    cast,
-    overload,
-)
+from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, Union, cast, overload
 
 import aiosqlite
 from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey
@@ -88,7 +75,7 @@
 from chia.wallet.wallet_weight_proof_handler import WalletWeightProofHandler, get_wp_fork_point
 
 
-def get_wallet_db_path(root_path: Path, config: Dict[str, Any], key_fingerprint: str) -> Path:
+def get_wallet_db_path(root_path: Path, config: dict[str, Any], key_fingerprint: str) -> Path:
     """
     Construct a path to the wallet db. Uses config values and the wallet key's
     fingerprint to determine the wallet db filename.
@@ -124,7 +111,7 @@ class WalletNode:
 
     _protocol_check: ClassVar[RpcServiceProtocol] = cast("WalletNode", None)
 
-    config: Dict[str, Any]
+    config: dict[str, Any]
     root_path: Path
     constants: ConsensusConstants
     local_keychain: Optional[Keychain] = None
@@ -140,11 +127,11 @@ class WalletNode:
     logged_in_fingerprint: Optional[int] = None
     logged_in: bool = False
    _keychain_proxy: Optional[KeychainProxy] = None
-    _balance_cache: Dict[int, Balance] = dataclasses.field(default_factory=dict)
+    _balance_cache: dict[int, Balance] = dataclasses.field(default_factory=dict)
     # Peers that we have long synced to
-    synced_peers: Set[bytes32] = dataclasses.field(default_factory=set)
+    synced_peers: set[bytes32] = dataclasses.field(default_factory=set)
     wallet_peers: Optional[WalletPeers] = None
-    peer_caches: Dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
+    peer_caches: dict[bytes32, PeerRequestCache] = dataclasses.field(default_factory=dict)
     validation_semaphore: Optional[asyncio.Semaphore] = None
     local_node_synced: bool = False
     LONG_SYNC_THRESHOLD: int = 300
@@ -158,7 +145,7 @@ class WalletNode:
     _process_new_subscriptions_task: Optional[asyncio.Task[None]] = None
     _retry_failed_states_task: Optional[asyncio.Task[None]] = None
     _secondary_peer_sync_task: Optional[asyncio.Task[None]] = None
-    _tx_messages_in_progress: Dict[bytes32, List[bytes32]] = dataclasses.field(default_factory=dict)
+    _tx_messages_in_progress: dict[bytes32, list[bytes32]] = dataclasses.field(default_factory=dict)
 
     @contextlib.asynccontextmanager
     async def manage(self) -> AsyncIterator[None]:
@@ -205,7 +192,7 @@ def new_peak_queue(self) -> NewPeakQueue:
         return self._new_peak_queue
 
-    def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
+    def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]:
         return default_get_connections(server=self.server, request_node_type=request_node_type)
 
     async def ensure_keychain_proxy(self) -> KeychainProxy:
@@ -306,7 +293,7 @@ def set_resync_on_startup(self, fingerprint: int, enabled: bool = True) -> None:
             self.log.info("Disabled resync for wallet fingerprint: %s", fingerprint)
         save_config(self.root_path, "config.yaml", config)
 
-    def set_auto_claim(self, auto_claim_config: AutoClaimSettings) -> Dict[str, Any]:
+    def set_auto_claim(self, auto_claim_config: AutoClaimSettings) -> dict[str, Any]:
         if auto_claim_config.batch_size < 1:
             auto_claim_config = dataclasses.replace(auto_claim_config, batch_size=uint16(50))
         auto_claim_config_json = auto_claim_config.to_json_dict()
@@ -554,17 +541,17 @@ async def _resend_queue(self) -> None:
                 self._tx_messages_in_progress.setdefault(peer.peer_node_id, [])
                 self._tx_messages_in_progress[peer.peer_node_id].append(msg_name)
 
-    async def _messages_to_resend(self) -> List[Tuple[Message, Set[bytes32]]]:
+    async def _messages_to_resend(self) -> list[tuple[Message, set[bytes32]]]:
         if self._wallet_state_manager is None or self._shut_down:
             return []
-        messages: List[Tuple[Message, Set[bytes32]]] = []
+        messages: list[tuple[Message, set[bytes32]]] = []
 
         current_time = int(time.time())
         retry_accepted_txs = False
         if self.last_wallet_tx_resend_time < current_time - self.wallet_tx_resend_timeout_secs:
             self.last_wallet_tx_resend_time = current_time
             retry_accepted_txs = True
-        records: List[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent(
+        records: list[TransactionRecord] = await self.wallet_state_manager.tx_store.get_not_sent(
             include_accepted_txs=retry_accepted_txs
         )
@@ -626,15 +613,15 @@ async def _process_new_subscriptions(self) -> None:
                     # Subscriptions are the highest priority, because we don't want to process any more peaks or
                     # state updates until we are sure that we subscribed to everything that we need to. Otherwise,
                     # we might not be able to process some state.
-                    coin_ids: List[bytes32] = item.data
+                    coin_ids: list[bytes32] = item.data
                     for peer in self.server.get_connections(NodeType.FULL_NODE):
-                        coin_states: List[CoinState] = await subscribe_to_coin_updates(coin_ids, peer, 0)
+                        coin_states: list[CoinState] = await subscribe_to_coin_updates(coin_ids, peer, 0)
                         if len(coin_states) > 0:
                             async with self.wallet_state_manager.lock:
                                 await self.add_states_from_peer(coin_states, peer)
                 elif item.item_type == NewPeakQueueTypes.PUZZLE_HASH_SUBSCRIPTION:
                     self.log.debug("Pulled from queue: %s %s", item.item_type.name, item.data)
-                    puzzle_hashes: List[bytes32] = item.data
+                    puzzle_hashes: list[bytes32] = item.data
                     for peer in self.server.get_connections(NodeType.FULL_NODE):
                         # Puzzle hash subscription
                         coin_states = await subscribe_to_phs(puzzle_hashes, peer, 0)
@@ -839,7 +826,7 @@ def is_new_state_update(cs: CoinState) -> bool:
         # Things, so we don't have to reprocess these later. There can be many things in ph_update_res.
         use_delta_sync = self.config.get("use_delta_sync", False)
         min_height_for_subscriptions = fork_height if use_delta_sync else 0
-        already_checked_ph: Set[bytes32] = set()
+        already_checked_ph: set[bytes32] = set()
         while not self._shut_down:
             await self.wallet_state_manager.create_more_puzzle_hashes()
             all_puzzle_hashes = await self.get_puzzle_hashes_to_subscribe()
@@ -847,7 +834,7 @@ def is_new_state_update(cs: CoinState) -> bool:
             if not_checked_puzzle_hashes == set():
                 break
             for batch in to_batches(not_checked_puzzle_hashes, 1000):
-                ph_update_res: List[CoinState] = await subscribe_to_phs(
+                ph_update_res: list[CoinState] = await subscribe_to_phs(
                     batch.entries, full_node, min_height_for_subscriptions
                 )
                 ph_update_res = list(filter(is_new_state_update, ph_update_res))
@@ -860,14 +847,14 @@ def is_new_state_update(cs: CoinState) -> bool:
 
         # The number of coin id updates are usually going to be significantly less than ph updates, so we can
         # sync from 0 every time.
-        already_checked_coin_ids: Set[bytes32] = set()
+        already_checked_coin_ids: set[bytes32] = set()
         while not self._shut_down:
             all_coin_ids = await self.get_coin_ids_to_subscribe()
             not_checked_coin_ids = set(all_coin_ids) - already_checked_coin_ids
             if not_checked_coin_ids == set():
                 break
             for batch in to_batches(not_checked_coin_ids, 1000):
-                c_update_res: List[CoinState] = await subscribe_to_coin_updates(
+                c_update_res: list[CoinState] = await subscribe_to_coin_updates(
                     batch.entries, full_node, min_height_for_subscriptions
                 )
@@ -892,7 +879,7 @@ def is_new_state_update(cs: CoinState) -> bool:
 
     async def add_states_from_peer(
         self,
-        items_input: List[CoinState],
+        items_input: list[CoinState],
         peer: WSChiaConnection,
         fork_height: Optional[uint32] = None,
         height: Optional[uint32] = None,
@@ -932,7 +919,7 @@ async def add_states_from_peer(
                 # Rollback race_cache not in clear_after_height to avoid applying rollbacks from new peak processing
                 cache.rollback_race_cache(fork_height=fork_height)
 
-        all_tasks: List[asyncio.Task[None]] = []
+        all_tasks: list[asyncio.Task[None]] = []
         target_concurrent_tasks: int = 30
 
         # Ensure the list is sorted
@@ -943,7 +930,7 @@ async def add_states_from_peer(
         if num_filtered > 0:
             self.log.info(f"Filtered {num_filtered} spam transactions")
 
-        async def validate_and_add(inner_states: List[CoinState], inner_idx_start: int) -> None:
+        async def validate_and_add(inner_states: list[CoinState], inner_idx_start: int) -> None:
             try:
                 assert self.validation_semaphore is not None
                 async with self.validation_semaphore:
@@ -1046,12 +1033,12 @@ def get_full_node_peer(self) -> WSChiaConnection:
         """
         Get a full node, preferring synced & trusted > synced & untrusted > unsynced & trusted > unsynced & untrusted
        """
-        full_nodes: List[WSChiaConnection] = self.get_full_node_peers_in_order()
+        full_nodes: list[WSChiaConnection] = self.get_full_node_peers_in_order()
         if len(full_nodes) == 0:
             raise ValueError("No peer connected")
         return full_nodes[0]
 
-    def get_full_node_peers_in_order(self) -> List[WSChiaConnection]:
+    def get_full_node_peers_in_order(self) -> list[WSChiaConnection]:
         """
         Get all full nodes sorted:
         preferring synced & trusted > synced & untrusted > unsynced & trusted > unsynced & untrusted
@@ -1059,11 +1046,11 @@ def get_full_node_peers_in_order(self) -> List[WSChiaConnection]:
         if self._server is None:
             return []
 
-        synced_and_trusted: List[WSChiaConnection] = []
-        synced: List[WSChiaConnection] = []
-        trusted: List[WSChiaConnection] = []
-        neither: List[WSChiaConnection] = []
-        all_nodes: List[WSChiaConnection] = self.server.get_connections(NodeType.FULL_NODE)
+        synced_and_trusted: list[WSChiaConnection] = []
+        synced: list[WSChiaConnection] = []
+        trusted: list[WSChiaConnection] = []
+        neither: list[WSChiaConnection] = []
+        all_nodes: list[WSChiaConnection] = self.server.get_connections(NodeType.FULL_NODE)
         random.shuffle(all_nodes)
         for node in all_nodes:
             we_synced_to_it = node.peer_node_id in self.synced_peers
@@ -1092,7 +1079,7 @@ async def get_timestamp_for_height_from_peer(self, height: uint32, peer: WSChiaC
             block = cache.get_block(uint32(request_height))
             if block is None:
                 self.log.debug(f"get_timestamp_for_height_from_peer cache miss for height {request_height}")
-                response: Optional[List[HeaderBlock]] = await request_header_blocks(
+                response: Optional[list[HeaderBlock]] = await request_header_blocks(
                     peer, uint32(request_height), uint32(request_height)
                 )
                 if response is not None and len(response) > 0:
@@ -1263,10 +1250,10 @@ async def sync_from_untrusted_close_to_peak(self, new_peak_hb: HeaderBlock, peer
             # Edge case, this happens when the peak < WEIGHT_PROOF_RECENT_BLOCKS
             # we still want to subscribe for all phs and coins.
             # (Hints are not in filter)
-            all_coin_ids: List[bytes32] = await self.get_coin_ids_to_subscribe()
-            phs: List[bytes32] = await self.get_puzzle_hashes_to_subscribe()
-            ph_updates: List[CoinState] = await subscribe_to_phs(phs, peer, min_height_for_subscriptions)
-            coin_updates: List[CoinState] = await subscribe_to_coin_updates(
+            all_coin_ids: list[bytes32] = await self.get_coin_ids_to_subscribe()
+            phs: list[bytes32] = await self.get_puzzle_hashes_to_subscribe()
+            ph_updates: list[CoinState] = await subscribe_to_phs(phs, peer, min_height_for_subscriptions)
+            coin_updates: list[CoinState] = await subscribe_to_coin_updates(
                 all_coin_ids, peer, min_height_for_subscriptions
             )
             success = await self.add_states_from_peer(
@@ -1373,7 +1360,7 @@ async def fetch_and_update_weight_proof(self, peer: WSChiaConnection, peak: Head
 
         return get_wp_fork_point(self.constants, old_proof, weight_proof)
 
-    async def get_puzzle_hashes_to_subscribe(self) -> List[bytes32]:
+    async def get_puzzle_hashes_to_subscribe(self) -> list[bytes32]:
         all_puzzle_hashes = await self.wallet_state_manager.puzzle_store.get_all_puzzle_hashes(1)
         # Get all phs from interested store
         interested_puzzle_hashes = [
@@ -1382,7 +1369,7 @@ async def get_puzzle_hashes_to_subscribe(self) -> List[bytes32]:
         all_puzzle_hashes.update(interested_puzzle_hashes)
         return list(all_puzzle_hashes)
 
-    async def get_coin_ids_to_subscribe(self) -> List[bytes32]:
+    async def get_coin_ids_to_subscribe(self) -> list[bytes32]:
         coin_ids = await self.wallet_state_manager.trade_manager.get_coins_of_interest()
         coin_ids.update(await self.wallet_state_manager.interested_store.get_interested_coin_ids())
         return list(coin_ids)
@@ -1475,7 +1462,7 @@ async def validate_received_state_from_peer(
                 # Peer is telling us that coin that was previously known to be spent is not spent anymore
                 # Check old state
 
-                spent_state_blocks: Optional[List[HeaderBlock]] = await request_header_blocks(
+                spent_state_blocks: Optional[list[HeaderBlock]] = await request_header_blocks(
                     peer, current.spent_block_height, current.spent_block_height
                 )
                 if spent_state_blocks is None:
@@ -1601,7 +1588,7 @@ async def validate_block_inclusion(
                 return False
             all_peers_c = self.server.get_connections(NodeType.FULL_NODE)
             all_peers = [(con, self.is_trusted(con)) for con in all_peers_c]
-            blocks: Optional[List[HeaderBlock]] = await fetch_header_blocks_in_range(
+            blocks: Optional[list[HeaderBlock]] = await fetch_header_blocks_in_range(
                 start, end, peer_request_cache, all_peers
             )
             if blocks is None:
@@ -1618,9 +1605,9 @@ async def validate_block_inclusion(
             if last != weight_proof.sub_epochs[inserted].reward_chain_hash:
                 self.log.error("Failed validation 4")
                 return False
-            pk_m_sig: List[Tuple[G1Element, bytes32, G2Element]] = []
-            sigs_to_cache: List[HeaderBlock] = []
-            blocks_to_cache: List[Tuple[bytes32, uint32]] = []
+            pk_m_sig: list[tuple[G1Element, bytes32, G2Element]] = []
+            sigs_to_cache: list[HeaderBlock] = []
+            blocks_to_cache: list[tuple[bytes32, uint32]] = []
 
             signatures_to_validate: int = 30
             for idx in range(len(blocks)):
@@ -1684,8 +1671,8 @@ async def validate_block_inclusion(
         return True
 
     async def get_coin_state(
-        self, coin_names: List[bytes32], peer: WSChiaConnection, fork_height: Optional[uint32] = None
-    ) -> List[CoinState]:
+        self, coin_names: list[bytes32], peer: WSChiaConnection, fork_height: Optional[uint32] = None
+    ) -> list[CoinState]:
         msg = RegisterForCoinUpdates(coin_names, uint32(0))
         coin_state: Optional[RespondToCoinUpdates] = await peer.call_api(FullNodeAPI.register_interest_in_coin, msg)
         if coin_state is None or not isinstance(coin_state, RespondToCoinUpdates):
@@ -1709,7 +1696,7 @@ async def get_coin_state(
 
     async def fetch_children(
         self, coin_name: bytes32, peer: WSChiaConnection, fork_height: Optional[uint32] = None
-    ) -> List[CoinState]:
+    ) -> list[CoinState]:
         response: Optional[RespondChildren] = await peer.call_api(
             FullNodeAPI.request_children, RequestChildren(coin_name)
         )
@@ -1747,7 +1734,7 @@ async def _update_balance_cache(self, wallet_id: uint32) -> None:
                 pending_change = await wallet.get_pending_change_balance()
                 max_send_amount = await wallet.get_max_send_amount(unspent_records)
 
-                unconfirmed_removals: Dict[bytes32, Coin] = await wallet.wallet_state_manager.unconfirmed_removals_for_wallet(
+                unconfirmed_removals: dict[bytes32, Coin] = await wallet.wallet_state_manager.unconfirmed_removals_for_wallet(
                     wallet_id
                 )
                 self._balance_cache[wallet_id] = Balance(
diff --git a/chia/wallet/wallet_pool_store.py b/chia/wallet/wallet_pool_store.py
index 5845d6ed8fd5..aa86d33eb770 100644
--- a/chia/wallet/wallet_pool_store.py
+++ b/chia/wallet/wallet_pool_store.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import logging
-from typing import List, Tuple
 
 from chia.types.coin_spend import CoinSpend
 from chia.util.db_wrapper import DBWrapper2
@@ -89,7 +88,7 @@ async def add_spend(
             )
             await cursor.close()
 
-    async def get_spends_for_wallet(self, wallet_id: int) -> List[Tuple[uint32, CoinSpend]]:
+    async def get_spends_for_wallet(self, wallet_id: int) -> list[tuple[uint32, CoinSpend]]:
         """
         Retrieves all entries for a wallet ID.
         """
diff --git a/chia/wallet/wallet_protocol.py b/chia/wallet/wallet_protocol.py
index 1a597f898024..93c569f731a9 100644
--- a/chia/wallet/wallet_protocol.py
+++ b/chia/wallet/wallet_protocol.py
@@ -1,6 +1,6 @@
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, List, Optional, Set, Tuple, TypeVar
+from typing import TYPE_CHECKING, Optional, TypeVar
 
 from chia_rs import G1Element
 from typing_extensions import NotRequired, Protocol, TypedDict
@@ -35,17 +35,17 @@ async def select_coins(
         self,
         amount: uint64,
         action_scope: WalletActionScope,
-    ) -> Set[Coin]: ...
+    ) -> set[Coin]: ...
 
-    async def get_confirmed_balance(self, record_list: Optional[Set[WalletCoinRecord]] = None) -> uint128: ...
+    async def get_confirmed_balance(self, record_list: Optional[set[WalletCoinRecord]] = None) -> uint128: ...
 
-    async def get_unconfirmed_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: ...
+    async def get_unconfirmed_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: ...
 
-    async def get_spendable_balance(self, unspent_records: Optional[Set[WalletCoinRecord]] = None) -> uint128: ...
+    async def get_spendable_balance(self, unspent_records: Optional[set[WalletCoinRecord]] = None) -> uint128: ...
 
     async def get_pending_change_balance(self) -> uint64: ...
 
-    async def get_max_send_amount(self, records: Optional[Set[WalletCoinRecord]] = None) -> uint128: ...
+    async def get_max_send_amount(self, records: Optional[set[WalletCoinRecord]] = None) -> uint128: ...
 
     # not all wallet supports this. To signal support, make
     # require_derivation_paths() return true
@@ -71,14 +71,14 @@ class GSTOptionalArgs(TypedDict):
     add_pending_singleton: NotRequired[bool]
     announce_new_state: NotRequired[bool]
     # CATWallet
-    cat_discrepancy: NotRequired[Optional[Tuple[int, Program, Program]]]
+    cat_discrepancy: NotRequired[Optional[tuple[int, Program, Program]]]
     # NFTWallet
     nft_coin: NotRequired[Optional[NFTCoinInfo]]
     new_owner: NotRequired[Optional[bytes]]
     new_did_inner_hash: NotRequired[Optional[bytes]]
     trade_prices_list: NotRequired[Optional[Program]]
-    additional_bundles: NotRequired[List[WalletSpendBundle]]
-    metadata_update: NotRequired[Optional[Tuple[str, str]]]
+    additional_bundles: NotRequired[list[WalletSpendBundle]]
+    metadata_update: NotRequired[Optional[tuple[str, str]]]
     # CR-CAT Wallet
     add_authorizations_to_cr_cats: NotRequired[bool]
     # VCWallet
diff --git a/chia/wallet/wallet_puzzle_store.py b/chia/wallet/wallet_puzzle_store.py
index 89f04c6cc48c..f4e3d3cbdad1 100644
--- a/chia/wallet/wallet_puzzle_store.py
+++ b/chia/wallet/wallet_puzzle_store.py
@@ -2,7 +2,7 @@
 
 import asyncio
 import logging
-from typing import Dict, List, Optional, Set
+from typing import Optional
 
 from chia_rs import G1Element
 
@@ -27,7 +27,7 @@ class WalletPuzzleStore:
     db_wrapper: DBWrapper2
     wallet_identifier_cache: LRUCache
     # maps wallet_id -> last_derivation_index
-    last_wallet_derivation_index: Dict[uint32, uint32]
+    last_wallet_derivation_index: dict[uint32, uint32]
     last_derivation_index: Optional[uint32]
 
     @classmethod
@@ -68,7 +68,7 @@ async def create(cls, db_wrapper: DBWrapper2):
         self.last_wallet_derivation_index = {}
         return self
 
-    async def add_derivation_paths(self, records: List[DerivationRecord]) -> None:
+    async def add_derivation_paths(self, records: list[DerivationRecord]) -> None:
         """
         Insert many derivation paths into the database.
         """
@@ -267,7 +267,7 @@ async def get_wallet_identifier_for_puzzle_hash(self, puzzle_hash: bytes32) -> O
 
         return None
 
-    async def get_all_puzzle_hashes(self, wallet_id: Optional[int] = None) -> Set[bytes32]:
+    async def get_all_puzzle_hashes(self, wallet_id: Optional[int] = None) -> set[bytes32]:
         """
         Return a set containing all puzzle_hashes we generated.
""" diff --git a/chia/wallet/wallet_retry_store.py b/chia/wallet/wallet_retry_store.py index 71a9241f88d4..a2a6d190d549 100644 --- a/chia/wallet/wallet_retry_store.py +++ b/chia/wallet/wallet_retry_store.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional, Tuple +from typing import Optional from chia_rs import CoinState @@ -30,7 +30,7 @@ async def create(cls, db_wrapper: DBWrapper2) -> WalletRetryStore: return self - async def get_all_states_to_retry(self) -> List[Tuple[CoinState, bytes32, uint32]]: + async def get_all_states_to_retry(self) -> list[tuple[CoinState, bytes32, uint32]]: """ Return all states that were failed to sync """ diff --git a/chia/wallet/wallet_singleton_store.py b/chia/wallet/wallet_singleton_store.py index f6843a544906..325aaec0be83 100644 --- a/chia/wallet/wallet_singleton_store.py +++ b/chia/wallet/wallet_singleton_store.py @@ -3,7 +3,7 @@ import json import logging from sqlite3 import Row -from typing import List, Optional, Type, TypeVar, Union +from typing import Optional, TypeVar, Union from clvm.casts import int_from_bytes @@ -28,7 +28,7 @@ class WalletSingletonStore: db_wrapper: DBWrapper2 @classmethod - async def create(cls: Type[_T_WalletSingletonStore], wrapper: DBWrapper2) -> _T_WalletSingletonStore: + async def create(cls: type[_T_WalletSingletonStore], wrapper: DBWrapper2) -> _T_WalletSingletonStore: self = cls() self.db_wrapper = wrapper @@ -183,7 +183,7 @@ async def update_pending_transaction(self, coin_id: bytes32, pending: bool) -> b ) return c.rowcount > 0 - async def get_records_by_wallet_id(self, wallet_id: int) -> List[SingletonRecord]: + async def get_records_by_wallet_id(self, wallet_id: int) -> list[SingletonRecord]: """ Retrieves all entries for a wallet ID. """ @@ -195,7 +195,7 @@ async def get_records_by_wallet_id(self, wallet_id: int) -> List[SingletonRecord ) return [self._to_singleton_record(row) for row in rows] - async def get_records_by_coin_id(self, coin_id: bytes32) -> List[SingletonRecord]: + async def get_records_by_coin_id(self, coin_id: bytes32) -> list[SingletonRecord]: """ Retrieves all entries for a coin ID. """ @@ -207,7 +207,7 @@ async def get_records_by_coin_id(self, coin_id: bytes32) -> List[SingletonRecord ) return [self._to_singleton_record(row) for row in rows] - async def get_records_by_singleton_id(self, singleton_id: bytes32) -> List[SingletonRecord]: + async def get_records_by_singleton_id(self, singleton_id: bytes32) -> list[SingletonRecord]: """ Retrieves all entries for a singleton ID. """ @@ -234,7 +234,7 @@ async def rollback(self, height: int, wallet_id_arg: int) -> None: async def count(self, wallet_id: Optional[uint32] = None) -> int: sql = "SELECT COUNT(singleton_id) FROM singletons WHERE removed_height=0" - params: List[uint32] = [] + params: list[uint32] = [] if wallet_id is not None: sql += " AND wallet_id=?" params.append(wallet_id) @@ -246,7 +246,7 @@ async def count(self, wallet_id: Optional[uint32] = None) -> int: async def is_empty(self, wallet_id: Optional[uint32] = None) -> bool: sql = "SELECT 1 FROM singletons WHERE removed_height=0" - params: List[Union[uint32, bytes32]] = [] + params: list[Union[uint32, bytes32]] = [] if wallet_id is not None: sql += " AND wallet_id=?" 
             params.append(wallet_id)
diff --git a/chia/wallet/wallet_spend_bundle.py b/chia/wallet/wallet_spend_bundle.py
index 44eca406b3e7..fa791a8fe9a7 100644
--- a/chia/wallet/wallet_spend_bundle.py
+++ b/chia/wallet/wallet_spend_bundle.py
@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-from typing import List
-
 from chia_rs import AugSchemeMPL, G2Element
 
 from chia.consensus.default_constants import DEFAULT_CONSTANTS
@@ -13,9 +11,9 @@ class WalletSpendBundle(SpendBundle):
 
     @classmethod
-    def aggregate(cls, spend_bundles: List[T_SpendBundle]) -> WalletSpendBundle:
-        coin_spends: List[CoinSpend] = []
-        sigs: List[G2Element] = []
+    def aggregate(cls, spend_bundles: list[T_SpendBundle]) -> WalletSpendBundle:
+        coin_spends: list[CoinSpend] = []
+        sigs: list[G2Element] = []
         for bundle in spend_bundles:
             coin_spends += bundle.coin_spends
             sigs.append(bundle.aggregated_signature)
diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py
index 8d87a46d4ab1..f614287bbb62 100644
--- a/chia/wallet/wallet_state_manager.py
+++ b/chia/wallet/wallet_state_manager.py
@@ -7,23 +7,10 @@
 import multiprocessing.context
 import time
 import traceback
+from collections.abc import AsyncIterator, Iterator
 from contextlib import asynccontextmanager
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncIterator,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    Type,
-    TypeVar,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, Optional, TypeVar, Union
 
 import aiosqlite
 from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey
@@ -168,10 +155,10 @@ class WalletStateManager:
 
-    interested_ph_cache: Dict[bytes32, List[int]] = {}
-    interested_coin_cache: Dict[bytes32, List[int]] = {}
+    interested_ph_cache: dict[bytes32, list[int]] = {}
+    interested_coin_cache: dict[bytes32, list[int]] = {}
     constants: ConsensusConstants
-    config: Dict[str, Any]
+    config: dict[str, Any]
     tx_store: WalletTransactionStore
     puzzle_store: WalletPuzzleStore
     user_store: WalletUserStore
@@ -193,7 +180,7 @@ class WalletStateManager:
     db_wrapper: DBWrapper2
 
     main_wallet: Wallet
-    wallets: Dict[uint32, WalletProtocol[Any]]
+    wallets: dict[uint32, WalletProtocol[Any]]
     private_key: Optional[PrivateKey]
     root_pubkey: G1Element
 
@@ -209,15 +196,15 @@ class WalletStateManager:
     wallet_node: WalletNode
     pool_store: WalletPoolStore
     dl_store: DataLayerStore
-    default_cats: Dict[str, Any]
-    asset_to_wallet_map: Dict[AssetType, Any]
+    default_cats: dict[str, Any]
+    asset_to_wallet_map: dict[AssetType, Any]
     initial_num_public_keys: int
     decorator_manager: PuzzleDecoratorManager
 
     @staticmethod
     async def create(
         private_key: Optional[PrivateKey],
-        config: Dict[str, Any],
+        config: dict[str, Any],
         db_path: Path,
         constants: ConsensusConstants,
         server: ChiaServer,
@@ -392,7 +379,7 @@ def get_master_private_key(self) -> PrivateKey:
         return self.private_key
 
-    def get_wallet(self, id: uint32, required_type: Type[TWalletType]) -> TWalletType:
+    def get_wallet(self, id: uint32, required_type: type[TWalletType]) -> TWalletType:
         wallet = self.wallets[id]
         if not isinstance(wallet, required_type):
             raise Exception(
@@ -431,7 +418,7 @@ async def create_more_puzzle_hashes(
 
         # iterate all wallets that need derived keys and establish the start
        # index for all of them
-        start_index_by_wallet: Dict[uint32, int] = {}
+        start_index_by_wallet: dict[uint32, int] = {}
         last_index = unused + to_generate
         for wallet_id in targets:
             target_wallet = self.wallets[wallet_id]
@@ -457,8 +444,8 @@ async def create_more_puzzle_hashes(
         # now derive the keysfrom lowest_start_index to last_index
         # these maps derivation index to public key
-        hardened_keys: Dict[int, G1Element] = {}
-        unhardened_keys: Dict[int, G1Element] = {}
+        hardened_keys: dict[int, G1Element] = {}
+        unhardened_keys: dict[int, G1Element] = {}
 
         if self.private_key is not None:
             # Hardened
@@ -476,7 +463,7 @@ async def create_more_puzzle_hashes(
             assert target_wallet.type() != WalletType.POOLING_WALLET
             assert start_index < last_index
 
-            derivation_paths: List[DerivationRecord] = []
+            derivation_paths: list[DerivationRecord] = []
             creating_msg = f"Creating puzzle hashes from {start_index} to {last_index - 1} for wallet_id: {wallet_id}"
             self.log.info(f"Start: {creating_msg}")
             for index in range(start_index, last_index):
@@ -527,7 +514,7 @@ async def create_more_puzzle_hashes(
             await self.puzzle_store.set_used_up_to(uint32(unused - 1))
 
     async def update_wallet_puzzle_hashes(self, wallet_id: uint32) -> None:
-        derivation_paths: List[DerivationRecord] = []
+        derivation_paths: list[DerivationRecord] = []
         target_wallet = self.wallets[wallet_id]
         last: Optional[uint32] = await self.puzzle_store.get_last_derivation_path_for_wallet(wallet_id)
         unused: Optional[uint32] = await self.puzzle_store.get_unused_derivation_path()
@@ -606,14 +593,14 @@ def set_pending_callback(self, callback: PendingTxCallback) -> None:
         self.pending_tx_callback = callback
 
     def state_changed(
-        self, state: str, wallet_id: Optional[int] = None, data_object: Optional[Dict[str, Any]] = None
+        self, state: str, wallet_id: Optional[int] = None, data_object: Optional[dict[str, Any]] = None
     ) -> None:
         """
         Calls the callback if it's present.
         """
         if self.state_changed_callback is None:
             return None
-        change_data: Dict[str, Any] = {"state": state}
+        change_data: dict[str, Any] = {"state": state}
         if wallet_id is not None:
             change_data["wallet_id"] = wallet_id
         if data_object is not None:
@@ -687,13 +674,13 @@ async def set_sync_mode(self, target_height: uint32) -> AsyncIterator[uint32]:
             self._sync_target = None
 
     async def get_confirmed_spendable_balance_for_wallet(
-        self, wallet_id: int, unspent_records: Optional[Set[WalletCoinRecord]] = None
+        self, wallet_id: int, unspent_records: Optional[set[WalletCoinRecord]] = None
     ) -> uint128:
         """
         Returns the balance amount of all coins that are spendable.
         """
 
-        spendable: Set[WalletCoinRecord] = await self.get_spendable_coins_for_wallet(wallet_id, unspent_records)
+        spendable: set[WalletCoinRecord] = await self.get_spendable_coins_for_wallet(wallet_id, unspent_records)
 
         spendable_amount: uint128 = uint128(0)
         for record in spendable:
@@ -702,7 +689,7 @@ async def get_confirmed_spendable_balance_for_wallet(
         return spendable_amount
 
     async def does_coin_belong_to_wallet(
-        self, coin: Coin, wallet_id: int, hint_dict: Dict[bytes32, bytes32] = {}
+        self, coin: Coin, wallet_id: int, hint_dict: dict[bytes32, bytes32] = {}
     ) -> bool:
         """
         Returns true if we have the key for this coin.
@@ -713,7 +700,7 @@ async def get_confirmed_balance_for_wallet(
         self,
         wallet_id: int,
-        unspent_coin_records: Optional[Set[WalletCoinRecord]] = None,
+        unspent_coin_records: Optional[set[WalletCoinRecord]] = None,
     ) -> uint128:
         """
         Returns the confirmed balance, including coinbase rewards that are not spendable.
@@ -728,7 +715,7 @@ async def get_confirmed_balance_for_wallet(
         return uint128(sum(cr.coin.amount for cr in unspent_coin_records))
 
     async def get_unconfirmed_balance(
-        self, wallet_id: int, unspent_coin_records: Optional[Set[WalletCoinRecord]] = None
+        self, wallet_id: int, unspent_coin_records: Optional[set[WalletCoinRecord]] = None
     ) -> uint128:
         """
         Returns the balance, including coinbase rewards that are not spendable, and unconfirmed
@@ -745,8 +732,8 @@ async def get_unconfirmed_balance(
         else:
             unspent_coin_records = await self.coin_store.get_unspent_coins_for_wallet(wallet_id)
 
-        unconfirmed_tx: List[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
-        all_unspent_coins: Set[Coin] = {cr.coin for cr in unspent_coin_records}
+        unconfirmed_tx: list[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
+        all_unspent_coins: set[Coin] = {cr.coin for cr in unspent_coin_records}
 
         for record in unconfirmed_tx:
             if record.type in CLAWBACK_INCOMING_TRANSACTION_TYPES:
@@ -767,11 +754,11 @@ async def get_unconfirmed_balance(
 
         return uint128(sum(coin.amount for coin in all_unspent_coins))
 
-    async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> Dict[bytes32, Coin]:
+    async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> dict[bytes32, Coin]:
         """
         Returns new removals transactions that have not been confirmed yet.
         """
-        removals: Dict[bytes32, Coin] = {}
+        removals: dict[bytes32, Coin] = {}
         unconfirmed_tx = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
         for record in unconfirmed_tx:
             if record.type in CLAWBACK_INCOMING_TRANSACTION_TYPES:
@@ -781,19 +768,19 @@ async def unconfirmed_removals_for_wallet(self, wallet_id: int) -> Dict[bytes32,
             for coin in record.removals:
                 if coin not in record.additions:
                     removals[coin.name()] = coin
-        trade_removals: Dict[bytes32, WalletCoinRecord] = await self.trade_manager.get_locked_coins()
+        trade_removals: dict[bytes32, WalletCoinRecord] = await self.trade_manager.get_locked_coins()
         return {**removals, **{coin_id: cr.coin for coin_id, cr in trade_removals.items() if cr.wallet_id == wallet_id}}
 
     async def determine_coin_type(
         self, peer: WSChiaConnection, coin_state: CoinState, fork_height: Optional[uint32]
-    ) -> Tuple[Optional[WalletIdentifier], Optional[Streamable]]:
+    ) -> tuple[Optional[WalletIdentifier], Optional[Streamable]]:
         if coin_state.created_height is not None and (
             self.is_pool_reward(uint32(coin_state.created_height), coin_state.coin)
             or self.is_farmer_reward(uint32(coin_state.created_height), coin_state.coin)
         ):
             return None, None
 
-        response: List[CoinState] = await self.wallet_node.get_coin_state(
+        response: list[CoinState] = await self.wallet_node.get_coin_state(
             [coin_state.coin.parent_coin_info], peer=peer, fork_height=fork_height
         )
         if len(response) == 0:
@@ -904,7 +891,7 @@ async def determine_coin_type(
     async def auto_claim_coins(self) -> None:
         # Get unspent clawback coin
         current_timestamp = self.blockchain.get_latest_timestamp()
-        clawback_coins: Dict[Coin, ClawbackMetadata] = {}
+        clawback_coins: dict[Coin, ClawbackMetadata] = {}
         tx_fee = uint64(self.config.get("auto_claim", {}).get("tx_fee", 0))
         assert self.wallet_node.logged_in_fingerprint is not None
         tx_config_loader: TXConfigLoader = TXConfigLoader.from_json_dict(self.config.get("auto_claim", {}))
@@ -961,14 +948,14 @@ async def auto_claim_coins(self) -> None:
 
     async def spend_clawback_coins(
         self,
-        clawback_coins: Dict[Coin, ClawbackMetadata],
+        clawback_coins: dict[Coin, ClawbackMetadata],
         fee: uint64,
         action_scope: WalletActionScope,
         force: bool = False,
-        extra_conditions: Tuple[Condition, ...] = tuple(),
+        extra_conditions: tuple[Condition, ...] = tuple(),
     ) -> None:
         assert len(clawback_coins) > 0
-        coin_spends: List[CoinSpend] = []
+        coin_spends: list[CoinSpend] = []
         message: bytes32 = std_hash(b"".join([c.name() for c in clawback_coins.keys()]))
         now: uint64 = uint64(int(time.time()))
         derivation_record: Optional[DerivationRecord] = None
@@ -995,7 +982,7 @@ async def spend_clawback_coins(
                 assert derivation_record is not None
                 amount = uint64(amount + coin.amount)
                 # Remove the clawback hint since it is unnecessary for the XCH coin
-                memos: List[bytes] = [] if len(incoming_tx.memos) == 0 else incoming_tx.memos[0][1][1:]
+                memos: list[bytes] = [] if len(incoming_tx.memos) == 0 else incoming_tx.memos[0][1][1:]
                 inner_puzzle: Program = self.main_wallet.puzzle_for_pk(derivation_record.pubkey)
                 inner_solution: Program = self.main_wallet.make_solution(
                     primaries=[
@@ -1069,7 +1056,7 @@ async def spend_clawback_coins(
         async with action_scope.use() as interface:
             interface.side_effects.transactions.append(tx_record)
 
-    async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
+    async def filter_spam(self, new_coin_state: list[CoinState]) -> list[CoinState]:
         xch_spam_amount = self.config.get("xch_spam_amount", 1000000)
 
         # No need to filter anything if the filter is set to 1 or 0 mojos
@@ -1080,8 +1067,8 @@ async def filter_spam(self, new_coin_state: List[CoinState]) -> List[CoinState]:
         small_unspent_count = await self.coin_store.count_small_unspent(xch_spam_amount)
 
         # if small_unspent_count > spam_filter_after_n_txs:
-        filtered_cs: List[CoinState] = []
-        is_standard_wallet_phs: Set[bytes32] = set()
+        filtered_cs: list[CoinState] = []
+        is_standard_wallet_phs: set[bytes32] = set()
 
         for cs in new_coin_state:
             # Only apply filter to new coins being sent to our wallet, that are very small
@@ -1314,7 +1301,7 @@ async def handle_did(
             self.log.error("DID puzzle hash doesn't match, please check curried parameters.")
             return None
         # Create DID wallet
-        response: List[CoinState] = await self.wallet_node.get_coin_state([launch_id], peer=peer)
+        response: list[CoinState] = await self.wallet_node.get_coin_state([launch_id], peer=peer)
         if len(response) == 0:
             self.log.warning(f"Could not find the launch coin with ID: {launch_id}")
             return None
@@ -1368,7 +1355,7 @@ async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> O
             minter_did = None
             if minter_did is None:
                 # Check if the NFT is a bulk minting
-                launcher_parent: List[CoinState] = await self.wallet_node.get_coin_state(
+                launcher_parent: list[CoinState] = await self.wallet_node.get_coin_state(
                     [launcher_coin.parent_coin_info], peer=peer
                 )
                 assert (
@@ -1381,7 +1368,7 @@ async def get_minter_did(self, launcher_coin: Coin, peer: WSChiaConnection) -> O
                     launcher_parent[0].coin.parent_coin_info
                 ) or self.constants.GENESIS_CHALLENGE[16:] in bytes(launcher_parent[0].coin.parent_coin_info):
                     return None
-                did_coin: List[CoinState] = await self.wallet_node.get_coin_state(
+                did_coin: list[CoinState] = await self.wallet_node.get_coin_state(
                     [launcher_parent[0].coin.parent_coin_info], peer=peer
                 )
                 assert did_coin is not None and len(did_coin) == 1 and did_coin[0].spent_height is not None
@@ -1712,7 +1699,7 @@ async def handle_vc(self, vc: VerifiedCredential) -> Optional[WalletIdentifier]:
 
     async def _add_coin_states(
         self,
-        coin_states: List[CoinState],
+        coin_states: list[CoinState],
         peer: WSChiaConnection,
         fork_height: Optional[uint32],
     ) -> None:
@@ -1726,7 +1713,7 @@ async def _add_coin_states(
         curr_h = last_change_height
 
         trade_removals = await self.trade_manager.get_coins_of_interest()
-        all_unconfirmed: List[TransactionRecord] = await self.tx_store.get_all_unconfirmed()
+        all_unconfirmed: list[TransactionRecord] = await self.tx_store.get_all_unconfirmed()
         used_up_to = -1
         ph_to_index_cache: LRUCache[bytes32, uint32] = LRUCache(100)
 
@@ -1781,7 +1768,7 @@ async def _add_coin_states(
                     # Confirm tx records for txs which we submitted for coins which aren't in our wallet
                     if coin_state.created_height is not None and coin_state.spent_height is not None:
                         all_unconfirmed = await self.tx_store.get_all_unconfirmed()
-                        tx_records_to_confirm: List[TransactionRecord] = []
+                        tx_records_to_confirm: list[TransactionRecord] = []
                         for out_tx_record in all_unconfirmed:
                             if coin_state.coin in out_tx_record.removals:
                                 tx_records_to_confirm.append(out_tx_record)
@@ -1930,7 +1917,7 @@ async def _add_coin_states(
 
                             # Reorg rollback adds reorged transactions so it's possible there is tx_record already
                             # Even though we are just adding coin record to the db (after reorg)
-                            tx_records: List[TransactionRecord] = []
+                            tx_records: list[TransactionRecord] = []
                             for out_tx_record in all_unconfirmed:
                                 for rem_coin in out_tx_record.removals:
                                     if rem_coin == coin_state.coin:
@@ -1973,7 +1960,7 @@ async def _add_coin_states(
                         await self.coin_store.set_spent(coin_name, uint32(coin_state.spent_height))
                         if record.coin_type == CoinType.CLAWBACK:
                             await self.interested_store.remove_interested_coin_id(coin_state.coin.name())
-                        confirmed_tx_records: List[TransactionRecord] = []
+                        confirmed_tx_records: list[TransactionRecord] = []
 
                         for tx_record in all_unconfirmed:
                             if tx_record.type in CLAWBACK_INCOMING_TRANSACTION_TYPES:
@@ -2032,7 +2019,7 @@ async def _add_coin_states(
                                     curr_coin_state.coin.name(), uint32(curr_coin_state.spent_height)
                                 )
                                 await self.add_interested_coin_ids([new_singleton_coin.name()])
-                                new_coin_state: List[CoinState] = await self.wallet_node.get_coin_state(
+                                new_coin_state: list[CoinState] = await self.wallet_node.get_coin_state(
                                     [coin_name], peer=peer, fork_height=fork_height
                                 )
                                 assert len(new_coin_state) == 1
@@ -2136,7 +2123,7 @@ async def _add_coin_states(
 
     async def add_coin_states(
         self,
-        coin_states: List[CoinState],
+        coin_states: list[CoinState],
         peer: WSChiaConnection,
         fork_height: Optional[uint32],
     ) -> bool:
@@ -2203,7 +2190,7 @@ async def get_wallet_identifier_for_puzzle_hash(self, puzzle_hash: bytes32) -> O
         return None
 
     async def get_wallet_identifier_for_coin(
-        self, coin: Coin, hint_dict: Dict[bytes32, bytes32] = {}
+        self, coin: Coin, hint_dict: dict[bytes32, bytes32] = {}
     ) -> Optional[WalletIdentifier]:
         wallet_identifier = await self.puzzle_store.get_wallet_identifier_for_puzzle_hash(coin.puzzle_hash)
         if (
@@ -2229,7 +2216,7 @@ async def coin_added(
         self,
         coin: Coin,
         height: uint32,
-        all_unconfirmed_transaction_records: List[TransactionRecord],
+        all_unconfirmed_transaction_records: list[TransactionRecord],
         wallet_id: uint32,
         wallet_type: WalletType,
         peer: WSChiaConnection,
@@ -2309,13 +2296,13 @@ async def coin_added(
 
     async def add_pending_transactions(
         self,
-        tx_records: List[TransactionRecord],
+        tx_records: list[TransactionRecord],
         push: bool = True,
         merge_spends: bool = True,
         sign: Optional[bool] = None,
-        additional_signing_responses: Optional[List[SigningResponse]] = None,
-        extra_spends: Optional[List[WalletSpendBundle]] = None,
-    ) -> List[TransactionRecord]:
+        additional_signing_responses: Optional[list[SigningResponse]] = None,
+        extra_spends: Optional[list[WalletSpendBundle]] = None,
+    ) -> list[TransactionRecord]:
         """
         Add a list of transactions to be submitted to the full node.
         Aggregates the `spend_bundle` property for each transaction onto the first transaction in the list.
@@ -2439,7 +2426,7 @@ async def remove_from_queue(
             else:
                 self.state_changed("tx_update", tx.wallet_id, {"transaction": tx})
 
-    async def get_all_transactions(self, wallet_id: int) -> List[TransactionRecord]:
+    async def get_all_transactions(self, wallet_id: int) -> list[TransactionRecord]:
         """
         Retrieves all confirmed and pending transactions
         """
@@ -2453,7 +2440,7 @@ async def get_coin_record_by_wallet_record(self, wr: WalletCoinRecord) -> CoinRe
         timestamp: uint64 = await self.wallet_node.get_timestamp_for_height(wr.confirmed_block_height)
         return wr.to_coin_record(timestamp)
 
-    async def get_coin_records_by_coin_ids(self, **kwargs: Any) -> List[CoinRecord]:
+    async def get_coin_records_by_coin_ids(self, **kwargs: Any) -> list[CoinRecord]:
         result = await self.coin_store.get_coin_records(**kwargs)
         return [await self.get_coin_record_by_wallet_record(record) for record in result.records]
 
@@ -2465,7 +2452,7 @@ async def get_wallet_for_coin(self, coin_id: bytes32) -> Optional[WalletProtocol
             wallet = self.wallets[wallet_id]
         return wallet
 
-    async def reorg_rollback(self, height: int) -> List[uint32]:
+    async def reorg_rollback(self, height: int) -> list[uint32]:
         """
         Rolls back and updates the coin_store and transaction store. It's possible this height
         is the tip, or even beyond the tip.
         """
@@ -2475,7 +2462,7 @@ async def reorg_rollback(self, height: int) -> List[uint32]:
         await self.coin_store.rollback_to_block(height)
         await self.interested_store.rollback_to_block(height)
         await self.dl_store.rollback_to_block(height)
-        reorged: List[TransactionRecord] = await self.tx_store.get_transaction_above(height)
+        reorged: list[TransactionRecord] = await self.tx_store.get_transaction_above(height)
         await self.tx_store.rollback_to_block(height)
         for record in reorged:
             if TransactionType(record.type) in [
@@ -2489,7 +2476,7 @@ async def reorg_rollback(self, height: int) -> List[uint32]:
                 await self.tx_store.tx_reorged(record)
 
         # Removes wallets that were created from a blockchain transaction which got reorged.
-        remove_ids: List[uint32] = []
+        remove_ids: list[uint32] = []
         for wallet_id, wallet in self.wallets.items():
             if wallet.type() == WalletType.POOLING_WALLET.value:
                 assert isinstance(wallet, PoolWallet)
@@ -2508,7 +2495,7 @@ async def _await_closed(self) -> None:
     def unlink_db(self) -> None:
         Path(self.db_path).unlink()
 
-    async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> List[WalletInfo]:
+    async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> list[WalletInfo]:
         return await self.user_store.get_all_wallet_info_entries(wallet_type)
 
     async def get_wallet_for_asset_id(self, asset_id: str) -> Optional[WalletProtocol[Any]]:
@@ -2554,8 +2541,8 @@ async def add_new_wallet(self, wallet: WalletProtocol[Any]) -> None:
         self.state_changed("wallet_created")
 
     async def get_spendable_coins_for_wallet(
-        self, wallet_id: int, records: Optional[Set[WalletCoinRecord]] = None
-    ) -> Set[WalletCoinRecord]:
+        self, wallet_id: int, records: Optional[set[WalletCoinRecord]] = None
+    ) -> set[WalletCoinRecord]:
         wallet_type = self.wallets[uint32(wallet_id)].type()
         if records is None:
             if wallet_type == WalletType.CRCAT:
@@ -2564,8 +2551,8 @@ async def get_spendable_coins_for_wallet(
                 records = await self.coin_store.get_unspent_coins_for_wallet(wallet_id)
 
         # Coins that are currently part of a transaction
-        unconfirmed_tx: List[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
-        removal_dict: Dict[bytes32, Coin] = {}
+        unconfirmed_tx: list[TransactionRecord] = await self.tx_store.get_unconfirmed_for_wallet(wallet_id)
+        removal_dict: dict[bytes32, Coin] = {}
         for tx in unconfirmed_tx:
             for coin in tx.removals:
                 # TODO, "if" might not be necessary once unconfirmed tx doesn't contain coins for other wallets
@@ -2573,7 +2560,7 @@ async def get_spendable_coins_for_wallet(
                     removal_dict[coin.name()] = coin
 
         # Coins that are part of the trade
-        offer_locked_coins: Dict[bytes32, WalletCoinRecord] = await self.trade_manager.get_locked_coins()
+        offer_locked_coins: dict[bytes32, WalletCoinRecord] = await self.trade_manager.get_locked_coins()
 
         filtered = set()
         for record in records:
@@ -2595,7 +2582,7 @@ async def new_peak(self, height: uint32) -> None:
         if self.wallet_node.last_wallet_tx_resend_time < current_time - self.wallet_node.wallet_tx_resend_timeout_secs:
             self.tx_pending_changed()
 
-    async def add_interested_puzzle_hashes(self, puzzle_hashes: List[bytes32], wallet_ids: List[int]) -> None:
+    async def add_interested_puzzle_hashes(self, puzzle_hashes: list[bytes32], wallet_ids: list[int]) -> None:
         # TODO: It's unclear if the intended use for this is that each puzzle hash should store all
         # the elements of wallet_ids. It only stores one wallet_id per puzzle hash in the interested_store
         # but the coin_cache keeps all wallet_ids for each puzzle hash
@@ -2610,7 +2597,7 @@ async def add_interested_puzzle_hashes(self, puzzle_hashes: List[bytes32], walle
         if len(puzzle_hashes) > 0:
             await self.wallet_node.new_peak_queue.subscribe_to_puzzle_hashes(puzzle_hashes)
 
-    async def add_interested_coin_ids(self, coin_ids: List[bytes32], wallet_ids: List[int] = []) -> None:
+    async def add_interested_coin_ids(self, coin_ids: list[bytes32], wallet_ids: list[int] = []) -> None:
         # TODO: FIX: wallet_ids is sometimes populated unexpectedly when called from add_pending_transaction
         for coin_id in coin_ids:
             if coin_id in self.interested_coin_cache:
@@ -2625,7 +2612,7 @@ async def add_interested_coin_ids(self, coin_ids: List[bytes32], wallet_ids: Lis
             await self.wallet_node.new_peak_queue.subscribe_to_coin_ids(coin_ids)
 
     async def delete_trade_transactions(self, trade_id: bytes32) -> None:
-        txs: List[TransactionRecord] = await self.tx_store.get_transactions_by_trade_id(trade_id)
+        txs: list[TransactionRecord] = await self.tx_store.get_transactions_by_trade_id(trade_id)
         for tx in txs:
             await self.tx_store.delete_transaction_record(tx.name)
 
@@ -2665,15 +2652,15 @@ async def sum_hint_for_pubkey(self, pk: bytes) -> Optional[SumHint]:
     async def path_hint_for_pubkey(self, pk: bytes) -> Optional[PathHint]:
         return await self.main_wallet.path_hint_for_pubkey(pk)
 
-    async def key_hints_for_pubkeys(self, pks: List[bytes]) -> KeyHints:
+    async def key_hints_for_pubkeys(self, pks: list[bytes]) -> KeyHints:
         return KeyHints(
             [sum_hint for pk in pks for sum_hint in (await self.sum_hint_for_pubkey(pk),) if sum_hint is not None],
             [path_hint for pk in pks for path_hint in (await self.path_hint_for_pubkey(pk),) if path_hint is not None],
         )
 
-    async def gather_signing_info(self, coin_spends: List[Spend]) -> SigningInstructions:
-        pks: List[bytes] = []
-        signing_targets: List[SigningTarget] = []
+    async def gather_signing_info(self, coin_spends: list[Spend]) -> SigningInstructions:
+        pks: list[bytes] = []
+        signing_targets: list[SigningTarget] = []
         for coin_spend in coin_spends:
             _coin_spend = coin_spend.as_coin_spend()
             # Get AGG_SIG conditions
@@ -2696,10 +2683,10 @@ async def gather_signing_info(self, coin_spends: List[Spend]) -> SigningInstruct
             signing_targets,
         )
 
-    async def gather_signing_info_for_bundles(self, bundles: List[WalletSpendBundle]) -> List[UnsignedTransaction]:
-        utxs: List[UnsignedTransaction] = []
+    async def gather_signing_info_for_bundles(self, bundles: list[WalletSpendBundle]) -> list[UnsignedTransaction]:
+        utxs: list[UnsignedTransaction] = []
         for bundle in bundles:
-            signer_protocol_spends: List[Spend] = [Spend.from_coin_spend(spend) for spend in bundle.coin_spends]
+            signer_protocol_spends: list[Spend] = [Spend.from_coin_spend(spend) for spend in bundle.coin_spends]
             utxs.append(
                 UnsignedTransaction(
                     TransactionInfo(signer_protocol_spends), await self.gather_signing_info(signer_protocol_spends)
@@ -2708,21 +2695,21 @@ async def gather_signing_info_for_bundles(self, bundles: List[WalletSpendBundle]
 
         return utxs
 
-    async def gather_signing_info_for_txs(self, txs: List[TransactionRecord]) -> List[UnsignedTransaction]:
+    async def gather_signing_info_for_txs(self, txs: list[TransactionRecord]) -> list[UnsignedTransaction]:
         return await self.gather_signing_info_for_bundles(
             [tx.spend_bundle for tx in txs if tx.spend_bundle is not None]
         )
 
-    async def gather_signing_info_for_trades(self, offers: List[Offer]) -> List[UnsignedTransaction]:
+    async def gather_signing_info_for_trades(self, offers: list[Offer]) -> list[UnsignedTransaction]:
         return await self.gather_signing_info_for_bundles([offer._bundle for offer in offers])
 
     async def execute_signing_instructions(
         self, signing_instructions: SigningInstructions, partial_allowed: bool = False
-    ) -> List[SigningResponse]:
+    ) -> list[SigningResponse]:
         return await self.main_wallet.execute_signing_instructions(signing_instructions, partial_allowed)
 
     async def apply_signatures(
-        self, spends: List[Spend], signing_responses: List[SigningResponse]
+        self, spends: list[Spend], signing_responses: list[SigningResponse]
     ) -> SignedTransaction:
         return await self.main_wallet.apply_signatures(spends, signing_responses)
 
@@ -2736,17 +2723,17 @@ def signed_tx_to_spendbundle(self, signed_tx: SignedTransaction) -> WalletSpendB
 
     async def sign_transactions(
         self,
-        tx_records: List[TransactionRecord],
-        additional_signing_responses: List[SigningResponse] = [],
+        tx_records: list[TransactionRecord],
+        additional_signing_responses: list[SigningResponse] = [],
         partial_allowed: bool = False,
-    ) -> Tuple[List[TransactionRecord], List[SigningResponse]]:
-        unsigned_txs: List[UnsignedTransaction] = await self.gather_signing_info_for_txs(tx_records)
-        new_txs: List[TransactionRecord] = []
+    ) -> tuple[list[TransactionRecord], list[SigningResponse]]:
+        unsigned_txs: list[UnsignedTransaction] = await self.gather_signing_info_for_txs(tx_records)
+        new_txs: list[TransactionRecord] = []
         all_signing_responses = additional_signing_responses.copy()
         for unsigned_tx, tx in zip(
             unsigned_txs, [tx_record for tx_record in tx_records if tx_record.spend_bundle is not None]
         ):
-            signing_responses: List[SigningResponse] = await self.execute_signing_instructions(
+            signing_responses: list[SigningResponse] = await self.execute_signing_instructions(
                 unsigned_tx.signing_instructions, partial_allowed=partial_allowed
             )
             all_signing_responses.extend(signing_responses)
@@ -2762,15 +2749,15 @@ async def sign_transactions(
 
     async def sign_offers(
         self,
-        offers: List[Offer],
-        additional_signing_responses: List[SigningResponse] = [],
+        offers: list[Offer],
+        additional_signing_responses: list[SigningResponse] = [],
         partial_allowed: bool = False,
-    ) -> Tuple[List[Offer], List[SigningResponse]]:
-        unsigned_txs: List[UnsignedTransaction] = await self.gather_signing_info_for_trades(offers)
-        new_offers: List[Offer] = []
+    ) -> tuple[list[Offer], list[SigningResponse]]:
+        unsigned_txs: list[UnsignedTransaction] = await self.gather_signing_info_for_trades(offers)
+        new_offers: list[Offer] = []
         all_signing_responses = additional_signing_responses.copy()
         for unsigned_tx, offer in zip(unsigned_txs, [offer for offer in offers]):
-            signing_responses: List[SigningResponse] = await self.execute_signing_instructions(
+            signing_responses: list[SigningResponse] = await self.execute_signing_instructions(
                 unsigned_tx.signing_instructions, partial_allowed=partial_allowed
             )
             all_signing_responses.extend(signing_responses)
@@ -2785,12 +2772,12 @@ async def sign_offers(
 
     async def sign_bundle(
         self,
-        coin_spends: List[CoinSpend],
-        additional_signing_responses: List[SigningResponse] = [],
+        coin_spends: list[CoinSpend],
+        additional_signing_responses: list[SigningResponse] = [],
         partial_allowed: bool = False,
-    ) -> Tuple[WalletSpendBundle, List[SigningResponse]]:
+    ) -> tuple[WalletSpendBundle, list[SigningResponse]]:
         [unsigned_tx] = await self.gather_signing_info_for_bundles([WalletSpendBundle(coin_spends, G2Element())])
-        signing_responses: List[SigningResponse] = await self.execute_signing_instructions(
+        signing_responses: list[SigningResponse] = await self.execute_signing_instructions(
             unsigned_tx.signing_instructions, partial_allowed=partial_allowed
         )
         return (
@@ -2803,8 +2790,8 @@ async def sign_bundle(
             signing_responses,
         )
 
-    async def submit_transactions(self, signed_txs: List[SignedTransaction]) -> List[bytes32]:
-        bundles: List[WalletSpendBundle] = [self.signed_tx_to_spendbundle(tx) for tx in signed_txs]
+    async def submit_transactions(self, signed_txs: list[SignedTransaction]) -> list[bytes32]:
+        bundles: list[WalletSpendBundle] = [self.signed_tx_to_spendbundle(tx) for tx in signed_txs]
         for bundle in bundles:
             await self.wallet_node.push_tx(bundle)
         return [bundle.name() for bundle in bundles]
@@ -2816,8 +2803,8 @@ async def new_action_scope(
         push: bool = False,
         merge_spends: bool = True,
         sign: Optional[bool] = None,
-        additional_signing_responses: List[SigningResponse] = [],
-        extra_spends: List[WalletSpendBundle] = [],
+        additional_signing_responses: list[SigningResponse] = [],
+        extra_spends: list[WalletSpendBundle] = [],
     ) -> AsyncIterator[WalletActionScope]:
         async with new_wallet_action_scope(
             self,
diff --git a/chia/wallet/wallet_transaction_store.py b/chia/wallet/wallet_transaction_store.py
index 578501a0e429..e70cf23f8a77 100644
--- a/chia/wallet/wallet_transaction_store.py
+++ b/chia/wallet/wallet_transaction_store.py
@@ -3,7 +3,7 @@
 import dataclasses
 import logging
 import time
-from typing import Dict, List, Optional, Tuple
+from typing import Optional
 
 import aiosqlite
 
@@ -21,7 +21,7 @@
 log = logging.getLogger(__name__)
 
 
-def filter_ok_mempool_status(sent_to: List[Tuple[str, uint8, Optional[str]]]) -> List[Tuple[str, uint8, Optional[str]]]:
+def filter_ok_mempool_status(sent_to: list[tuple[str, uint8, Optional[str]]]) -> list[tuple[str, uint8, Optional[str]]]:
     """Remove SUCCESS and PENDING status records from a TransactionRecord sent_to field"""
     new_sent_to = []
     for peer, status, err in sent_to:
@@ -36,7 +36,7 @@ class WalletTransactionStore:
     """
 
     db_wrapper: DBWrapper2
-    tx_submitted: Dict[bytes32, Tuple[int, int]]  # tx_id: [time submitted: count]
+    tx_submitted: dict[bytes32, tuple[int, int]]  # tx_id: [time submitted: count]
     last_wallet_tx_resend_time: int  # Epoch time in seconds
 
     @classmethod
@@ -83,7 +83,7 @@ async def create(cls, db_wrapper: DBWrapper2):
             try:
                 await conn.execute("CREATE TABLE tx_times(txid blob PRIMARY KEY, valid_times blob)")
                 async with await conn.execute("SELECT bundle_id from transaction_record") as cursor:
-                    txids: List[bytes32] = [bytes32(row[0]) for row in await cursor.fetchall()]
+                    txids: list[bytes32] = [bytes32(row[0]) for row in await cursor.fetchall()]
                 await conn.executemany(
                     "INSERT INTO tx_times (txid, valid_times) VALUES(?, ?)",
                     [(id, bytes(ConditionValidTimes())) for id in txids],
@@ -222,7 +222,7 @@ async def get_transaction_record(self, tx_id: bytes32) -> Optional[TransactionRe
     # queries the state and one that updates it. Also, include_accepted_txs=True
     # might be a separate function too.
     # also, the current time should be passed in as a parameter
-    async def get_not_sent(self, *, include_accepted_txs=False) -> List[TransactionRecord]:
+    async def get_not_sent(self, *, include_accepted_txs=False) -> list[TransactionRecord]:
         """
         Returns the list of transactions that have not been received by full node yet.
""" @@ -256,7 +256,7 @@ async def get_not_sent(self, *, include_accepted_txs=False) -> List[TransactionR return records - async def get_farming_rewards(self) -> List[TransactionRecord]: + async def get_farming_rewards(self) -> list[TransactionRecord]: """ Returns the list of all farming rewards. """ @@ -269,7 +269,7 @@ async def get_farming_rewards(self) -> List[TransactionRecord]: ) return await self._get_new_tx_records_from_old([TransactionRecordOld.from_bytes(row[0]) for row in rows]) - async def get_all_unconfirmed(self) -> List[TransactionRecord]: + async def get_all_unconfirmed(self) -> list[TransactionRecord]: """ Returns the list of all transaction that have not yet been confirmed. """ @@ -277,7 +277,7 @@ async def get_all_unconfirmed(self) -> List[TransactionRecord]: rows = await conn.execute_fetchall("SELECT transaction_record from transaction_record WHERE confirmed=0") return await self._get_new_tx_records_from_old([TransactionRecordOld.from_bytes(row[0]) for row in rows]) - async def get_unconfirmed_for_wallet(self, wallet_id: int) -> List[TransactionRecord]: + async def get_unconfirmed_for_wallet(self, wallet_id: int) -> list[TransactionRecord]: """ Returns the list of transaction that have not yet been confirmed. """ @@ -297,7 +297,7 @@ async def get_transactions_between( confirmed: Optional[bool] = None, to_puzzle_hash: Optional[bytes32] = None, type_filter: Optional[TransactionTypeFilter] = None, - ) -> List[TransactionRecord]: + ) -> list[TransactionRecord]: """Return a list of transaction between start and end index. List is in reverse chronological order. start = 0 is most recent transaction """ @@ -366,7 +366,7 @@ async def get_transaction_count_for_wallet( ) return 0 if len(rows) == 0 else rows[0][0] - async def get_all_transactions_for_wallet(self, wallet_id: int, type: int = None) -> List[TransactionRecord]: + async def get_all_transactions_for_wallet(self, wallet_id: int, type: int = None) -> list[TransactionRecord]: """ Returns all stored transactions. """ @@ -385,7 +385,7 @@ async def get_all_transactions_for_wallet(self, wallet_id: int, type: int = None ) return await self._get_new_tx_records_from_old([TransactionRecordOld.from_bytes(row[0]) for row in rows]) - async def get_all_transactions(self) -> List[TransactionRecord]: + async def get_all_transactions(self) -> list[TransactionRecord]: """ Returns all stored transactions. 
""" @@ -393,7 +393,7 @@ async def get_all_transactions(self) -> List[TransactionRecord]: rows = await conn.execute_fetchall("SELECT transaction_record from transaction_record") return await self._get_new_tx_records_from_old([TransactionRecordOld.from_bytes(row[0]) for row in rows]) - async def get_transaction_above(self, height: int) -> List[TransactionRecord]: + async def get_transaction_above(self, height: int) -> list[TransactionRecord]: # Can be -1 (get all tx) async with self.db_wrapper.reader_no_transaction() as conn: @@ -402,7 +402,7 @@ async def get_transaction_above(self, height: int) -> List[TransactionRecord]: ) return await self._get_new_tx_records_from_old([TransactionRecordOld.from_bytes(row[0]) for row in rows]) - async def get_transactions_by_trade_id(self, trade_id: bytes32) -> List[TransactionRecord]: + async def get_transactions_by_trade_id(self, trade_id: bytes32) -> list[TransactionRecord]: async with self.db_wrapper.reader_no_transaction() as conn: rows = await conn.execute_fetchall( "SELECT transaction_record from transaction_record WHERE trade_id=?", (trade_id,) @@ -428,11 +428,11 @@ async def delete_unconfirmed_transactions(self, wallet_id: int): ) ).close() - async def _get_new_tx_records_from_old(self, old_records: List[TransactionRecordOld]) -> List[TransactionRecord]: - tx_id_to_valid_times: Dict[bytes, ConditionValidTimes] = {} + async def _get_new_tx_records_from_old(self, old_records: list[TransactionRecordOld]) -> list[TransactionRecord]: + tx_id_to_valid_times: dict[bytes, ConditionValidTimes] = {} empty_valid_times = ConditionValidTimes() async with self.db_wrapper.reader_no_transaction() as conn: - chunked_records: List[List[TransactionRecordOld]] = [ + chunked_records: list[list[TransactionRecordOld]] = [ old_records[i : min(len(old_records), i + self.db_wrapper.host_parameter_limit)] for i in range(0, len(old_records), self.db_wrapper.host_parameter_limit) ] diff --git a/chia/wallet/wallet_user_store.py b/chia/wallet/wallet_user_store.py index e5f942896950..9a522c886748 100644 --- a/chia/wallet/wallet_user_store.py +++ b/chia/wallet/wallet_user_store.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import List, Optional +from typing import Optional from chia.util.db_wrapper import DBWrapper2, execute_fetchone from chia.util.ints import uint32 @@ -86,7 +86,7 @@ async def get_last_wallet(self) -> Optional[WalletInfo]: return None if row is None else await self.get_wallet_by_id(row[0]) - async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> List[WalletInfo]: + async def get_all_wallet_info_entries(self, wallet_type: Optional[WalletType] = None) -> list[WalletInfo]: """ Return a set containing all wallets, optionally with a specific WalletType """ diff --git a/chia/wallet/wallet_weight_proof_handler.py b/chia/wallet/wallet_weight_proof_handler.py index 575fbdd5c75f..4444fb6a3dcd 100644 --- a/chia/wallet/wallet_weight_proof_handler.py +++ b/chia/wallet/wallet_weight_proof_handler.py @@ -6,7 +6,7 @@ import time from concurrent.futures.process import ProcessPoolExecutor from multiprocessing.context import BaseContext -from typing import IO, List, Optional +from typing import IO, Optional from chia.consensus.block_record import BlockRecord from chia.consensus.constants import ConsensusConstants @@ -44,7 +44,7 @@ def cancel_weight_proof_tasks(self) -> None: async def validate_weight_proof( self, weight_proof: WeightProof, skip_segment_validation: bool = False, old_proof: Optional[WeightProof] = None - ) -> 
List[BlockRecord]: + ) -> list[BlockRecord]: start_time = time.time() summaries, sub_epoch_weight_list = _validate_sub_epoch_summaries(self._constants, weight_proof) await asyncio.sleep(0) # break up otherwise multi-second sync code diff --git a/install.sh b/install.sh index 363c4074e24b..efed665b91d1 100755 --- a/install.sh +++ b/install.sh @@ -61,7 +61,7 @@ fi git submodule update --init mozilla-ca # You can specify preferred python version by exporting `INSTALL_PYTHON_VERSION` -# e.g. `export INSTALL_PYTHON_VERSION=3.8` +# e.g. `export INSTALL_PYTHON_VERSION=3.9` INSTALL_PYTHON_PATH= PYTHON_MAJOR_VER= PYTHON_MINOR_VER= @@ -74,7 +74,7 @@ OPENSSL_VERSION_INT= find_python() { set +e unset BEST_VERSION - for V in 312 3.12 311 3.11 310 3.10 39 3.9 38 3.8 3; do + for V in 312 3.12 311 3.11 310 3.10 39 3.9 3; do if command -v python$V >/dev/null; then if [ "$BEST_VERSION" = "" ]; then BEST_VERSION=$V @@ -138,7 +138,7 @@ if ! command -v "$INSTALL_PYTHON_PATH" >/dev/null; then fi if [ "$PYTHON_MAJOR_VER" -ne "3" ] || [ "$PYTHON_MINOR_VER" -lt "7" ] || [ "$PYTHON_MINOR_VER" -ge "13" ]; then - echo "Chia requires Python version >= 3.8 and < 3.13.0" >&2 + echo "Chia requires Python version >= 3.9 and < 3.13.0" >&2 echo "Current Python version = $INSTALL_PYTHON_VERSION" >&2 # If Arch, direct to Arch Wiki if type pacman >/dev/null 2>&1 && [ -f "/etc/arch-release" ]; then diff --git a/manage-mypy.py b/manage-mypy.py index fb2cd07dbd6f..aedebf5f86d4 100755 --- a/manage-mypy.py +++ b/manage-mypy.py @@ -5,7 +5,7 @@ import sys from pathlib import Path from subprocess import CalledProcessError, run -from typing import List, cast +from typing import cast import click @@ -19,23 +19,23 @@ def write_file(path: Path, content: str) -> None: file.write(content.strip() + "\n") -def get_mypy_failures() -> List[str]: +def get_mypy_failures() -> list[str]: # Get a list of all mypy failures when only running mypy with the template file `mypy.ini.template` command = [sys.executable, "activated.py", "mypy", "--config-file", "mypy.ini.template"] try: run(command, capture_output=True, check=True, encoding="utf-8") except CalledProcessError as e: if e.returncode == 1: - return cast(List[str], e.stdout.splitlines()) + return cast(list[str], e.stdout.splitlines()) raise click.ClickException(f"Unexpected mypy failure:\n{e.stderr}") from e return [] -def split_mypy_failure(line: str) -> List[str]: +def split_mypy_failure(line: str) -> list[str]: return list(Path(line[: line.find(".py")]).parts) -def build_exclusion_list(mypy_failures: List[str]) -> List[str]: +def build_exclusion_list(mypy_failures: list[str]) -> list[str]: # Create content for `mypy-exclusions.txt` from a list of mypy failures which look like: # # chia/cmds/wallet_funcs.py:1251: error: Incompatible types in assignment (expression has type "str", variable has type "int") [assignment] # noqa return sorted({".".join(split_mypy_failure(line)) for line in mypy_failures[:-1]}) diff --git a/poetry.lock b/poetry.lock index e1a7c3a0d7bd..a9c98fc50c84 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1660,7 +1660,6 @@ files = [ [package.dependencies] importlib-metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} "jaraco.classes" = "*" "jaraco.context" = "*" "jaraco.functools" = "*" @@ -2284,24 +2283,6 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx 
(>=6.0.0)", "types-redis"] -[[package]] -name = "pre-commit" -version = "3.5.0" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = true -python-versions = ">=3.8" -files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - [[package]] name = "pre-commit" version = "3.7.1" @@ -3456,5 +3437,5 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" -python-versions = ">=3.8.10, <3.13" -content-hash = "9ed9e509b09713592b90aad62ebf6bd4812df6972934781526902538bdb8328e" +python-versions = ">=3.9, <3.13" +content-hash = "f5fa932e4e2facb84efb24d244d477db40be0eb4307378f3bea7355c5ccbb2e0" diff --git a/pyproject.toml b/pyproject.toml index 0101b422140b..f8e80ab793f4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,7 +37,7 @@ priority = "supplemental" "Changelog" = "https://github.com/Chia-Network/chia-blockchain/blob/main/CHANGELOG.md" [tool.poetry.dependencies] -python = ">=3.8.10, <3.13" +python = ">=3.9, <3.13" aiofiles = "24.1.0" # Async IO for files aiohttp = "3.10.4" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks @@ -150,7 +150,7 @@ build-backend = "poetry_dynamic_versioning.backend" [tool.black] line-length = 120 -target-version = ['py38', 'py39', 'py310', 'py311', 'py312'] +target-version = ['py39', 'py310', 'py311', 'py312'] include = ''' ^/( [^/]*.py diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py index 2a3092b2f9e6..3278e6898401 100755 --- a/tools/analyze-chain.py +++ b/tools/analyze-chain.py @@ -7,7 +7,7 @@ from functools import partial from pathlib import Path from time import time -from typing import Callable, List, Optional, Tuple, Union +from typing import Callable, Optional, Union import click import zstd @@ -34,8 +34,8 @@ # exactly one of those will hold a value and the number of seconds it took to # run def run_gen( - generator_program: SerializedProgram, block_program_args: List[bytes], flags: int -) -> Tuple[Optional[int], Optional[SpendBundleConditions], float]: + generator_program: SerializedProgram, block_program_args: list[bytes], flags: int +) -> tuple[Optional[int], Optional[SpendBundleConditions], float]: try: start_time = time() err, result = run_block_generator( @@ -71,7 +71,7 @@ def callable_for_module_function_path(call: str) -> Callable: @click.option("--end", default=None, help="last block to examine") @click.option("--call", default=None, help="function to pass block iterator to in form `module:function`") def main(file: Path, mempool_mode: bool, start: int, end: Optional[int], call: Optional[str], verify_signatures: bool): - call_f: Callable[[Union[BlockInfo, FullBlock], bytes32, int, List[bytes], float, int], None] + call_f: Callable[[Union[BlockInfo, FullBlock], bytes32, int, list[bytes], float, int], None] if call is None: call_f = partial(default_call, verify_signatures) else: @@ -124,7 +124,7 @@ def default_call( block: Union[BlockInfo, FullBlock], hh: bytes32, height: int, - generator_blobs: List[bytes], + generator_blobs: list[bytes], ref_lookup_time: float, flags: int, ) -> None: @@ -148,8 +148,8 @@ def default_call( if verify_signatures: assert isinstance(block, FullBlock) # create hash_key list for 
aggsig check - pairs_pks: List[G1Element] = [] - pairs_msgs: List[bytes] = [] + pairs_pks: list[G1Element] = [] + pairs_msgs: list[bytes] = [] pairs_pks, pairs_msgs = pkm_pairs(result, DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) assert block.transactions_info is not None assert block.transactions_info.aggregated_signature is not None diff --git a/tools/analyze_memory_profile.py b/tools/analyze_memory_profile.py index 41c3844411ab..976151ce95ee 100644 --- a/tools/analyze_memory_profile.py +++ b/tools/analyze_memory_profile.py @@ -6,7 +6,7 @@ from functools import lru_cache from subprocess import check_call from sys import stdout -from typing import Dict, List, Optional, Set +from typing import Optional import click from colorama import Back, Fore, Style, init @@ -27,7 +27,7 @@ class Frame: size: int fun_id: int count: int = 1 - callers: Dict[str, CallInfo] = field(default_factory=dict) + callers: dict[str, CallInfo] = field(default_factory=dict) def add(self, size: int) -> None: self.size += size @@ -49,7 +49,7 @@ def fontcolor(pct: float) -> str: def resolve_function(file: str, line: int) -> str: try: with open(file) as f: - all_lines: List[str] = [] + all_lines: list[str] = [] for row in f: all_lines.append(row) @@ -116,7 +116,7 @@ def analyze_slot(ctx: click.Context, slot: int) -> None: print(f"generating call tree for slot {slot}") - all_frames: Dict[str, Frame] = {} + all_frames: dict[str, Frame] = {} total_size = 0 calls = 0 @@ -129,7 +129,7 @@ def analyze_slot(ctx: click.Context, slot: int) -> None: stdout.write(f"\rtotal size: {total_size/1000000:0.3f} MB ({calls} allocs) ") # to support recursive functions, make sure we only visit each frame # once during traversal - visited: Set[str] = set() + visited: set[str] = set() for frame in trace.traceback: fun = resolve_function(frame.filename, frame.lineno) if fun in visited: diff --git a/tools/chialispp.py b/tools/chialispp.py index deb2036bdc22..334b5e866f93 100644 --- a/tools/chialispp.py +++ b/tools/chialispp.py @@ -2,18 +2,18 @@ import sys from pathlib import Path -from typing import Any, Dict, List, Optional +from typing import Any, Optional # A simple class for separating a line into code and comment class Line: - def __init__(self, code: List[bytes], comment: Optional[List[bytes]]): + def __init__(self, code: list[bytes], comment: Optional[list[bytes]]): self.code = code self.comment = comment # Remove all whitespace from the beginning of a byte array -def trim_ascii_start(line: List[bytes]) -> List[bytes]: +def trim_ascii_start(line: list[bytes]) -> list[bytes]: first_non_ws: int = 0 got_one: bool = False @@ -30,7 +30,7 @@ def trim_ascii_start(line: List[bytes]) -> List[bytes]: # Remove all whitespace from the end of a byte array -def trim_ascii_end(line: List[bytes]) -> List[bytes]: +def trim_ascii_end(line: list[bytes]) -> list[bytes]: last_non_ws: int = 0 got_one: bool = False @@ -51,21 +51,21 @@ def __init__(self) -> None: self.paren_level: int = 0 self.out_col: int = 0 # The colum we are at while outputting a line self.cur_line: int = 0 - self.line: List[bytes] = [] - self.comment: Optional[List[bytes]] = None - self.lines: List[List[bytes]] = [] - self.work_lines: List[Line] = [] + self.line: list[bytes] = [] + self.comment: Optional[list[bytes]] = None + self.lines: list[list[bytes]] = [] + self.work_lines: list[Line] = [] self.getting_form_name: int = 0 self.got_form_on_line: int = 0 - self.form_name: List[bytes] = [] + self.form_name: list[bytes] = [] self.reset_form_indent: bool = False # self.def_started = False - 
self.result_line: List[bytes] = [] + self.result_line: list[bytes] = [] # self.definition_starts = [] # self.extra_def_lines = [] - self.indent_stack: List[int] = [] - self.result: List[List[bytes]] = [] - self.config: Dict[str, Any] = { + self.indent_stack: list[int] = [] + self.result: list[list[bytes]] = [] + self.config: dict[str, Any] = { "gnu_comment_conventions": False, } @@ -322,7 +322,7 @@ def retire_indent(self) -> None: self.indent_stack.pop() -def concat_byte_array(bs: List[bytes]) -> bytes: +def concat_byte_array(bs: list[bytes]) -> bytes: return b"".join(bs) diff --git a/tools/cpu_utilization.py b/tools/cpu_utilization.py index 78c5242d644c..9acc028b0d53 100644 --- a/tools/cpu_utilization.py +++ b/tools/cpu_utilization.py @@ -3,7 +3,6 @@ import time from dataclasses import dataclass from subprocess import check_call -from typing import Dict, List import click import psutil @@ -32,8 +31,8 @@ class Counters: def main(pid: int, output: str, threads: bool) -> None: process = psutil.Process(pid) - stats: Dict[int, Dict[int, Counters]] = {pid: {}} - timestamps: List[float] = [] + stats: dict[int, dict[int, Counters]] = {pid: {}} + timestamps: list[float] = [] try: step = 0 diff --git a/tools/generate_chain.py b/tools/generate_chain.py index b1501c6f58c5..87c135539f7e 100644 --- a/tools/generate_chain.py +++ b/tools/generate_chain.py @@ -5,9 +5,10 @@ import sqlite3 import sys import time +from collections.abc import Iterator from contextlib import closing, contextmanager from pathlib import Path -from typing import Iterator, List, Optional +from typing import Optional import click import zstd @@ -97,7 +98,7 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O wallet = bt.get_farmer_wallet_tool() farmer_puzzlehash = wallet.get_new_puzzlehash() pool_puzzlehash = wallet.get_new_puzzlehash() - transaction_blocks: List[uint32] = [] + transaction_blocks: list[uint32] = [] blocks = bt.get_consecutive_blocks( 3, @@ -107,7 +108,7 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O genesis_timestamp=uint64(1234567890), ) - unspent_coins: List[Coin] = [] + unspent_coins: list[Coin] = [] for b in blocks: for coin in b.get_included_reward_coins(): @@ -133,8 +134,8 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O with enable_profiler(profile, b.height): start_time = time.monotonic() - new_coins: List[Coin] = [] - spend_bundles: List[SpendBundle] = [] + new_coins: list[Coin] = [] + spend_bundles: list[SpendBundle] = [] i = 0 for i in range(num_tx_per_block): if unspent_coins == []: @@ -145,7 +146,7 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O new_coins.extend(bundle.additions()) spend_bundles.append(bundle) - block_references: List[uint32] + block_references: list[uint32] if block_refs: block_references = random.sample(transaction_blocks, min(len(transaction_blocks), 512)) random.shuffle(block_references) diff --git a/tools/manage_clvm.py b/tools/manage_clvm.py index d69933e31f1a..88acc5de9c1f 100644 --- a/tools/manage_clvm.py +++ b/tools/manage_clvm.py @@ -51,8 +51,8 @@ class CacheEntry(typing_extensions.TypedDict): hash: str -CacheEntries = typing.Dict[str, CacheEntry] -CacheVersion = typing.List[int] +CacheEntries = dict[str, CacheEntry] +CacheVersion = list[int] current_cache_version: CacheVersion = [1] @@ -113,10 +113,10 @@ class ClvmPaths: clvm: pathlib.Path hex: pathlib.Path hash: str - missing_files: typing.List[str] + missing_files: list[str] 
@classmethod - def from_clvm(cls, clvm: pathlib.Path, hash_dict: typing.Dict[str, str] = {}) -> ClvmPaths: + def from_clvm(cls, clvm: pathlib.Path, hash_dict: dict[str, str] = {}) -> ClvmPaths: stem_filename = clvm.name[: -len(clsp_suffix)] hex_path = clvm.with_name(stem_filename + hex_suffix) missing_files = [] @@ -139,7 +139,7 @@ class ClvmBytes: hash: bytes @classmethod - def from_clvm_paths(cls, paths: ClvmPaths, hash_dict: typing.Dict[str, str] = {}) -> ClvmBytes: + def from_clvm_paths(cls, paths: ClvmPaths, hash_dict: dict[str, str] = {}) -> ClvmBytes: hex_bytes = paths.hex.read_bytes() return cls( hex=hex_bytes, @@ -159,13 +159,13 @@ def from_hex_bytes(cls, hex_bytes: bytes) -> ClvmBytes: # These files have the wrong extension for now so we'll just manually exclude them -excludes: typing.Set[str] = set() +excludes: set[str] = set() def find_stems( - top_levels: typing.Set[str], + top_levels: set[str], suffixes: typing.Mapping[str, str] = all_suffixes, -) -> typing.Dict[str, typing.Set[pathlib.Path]]: +) -> dict[str, set[pathlib.Path]]: found_stems = { name: { path.with_name(path.name[: -len(suffix)]) @@ -207,7 +207,7 @@ def check(use_cache: bool) -> int: used_excludes = set() overall_fail = False - HASHES: typing.Dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {} + HASHES: dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {} cache: Cache if not use_cache: @@ -259,8 +259,8 @@ def check(use_cache: bool) -> int: print(f"FAIL : {stem_path.name + clvm_suffix} contains `(mod`") break - missing_files: typing.List[str] = [] - all_hash_stems: typing.List[str] = [] + missing_files: list[str] = [] + all_hash_stems: list[str] = [] print() print("Checking that all existing .clsp files compile to .clsp.hex that match existing caches:") @@ -358,7 +358,7 @@ def check(use_cache: bool) -> int: def build() -> int: overall_fail = False - HASHES: typing.Dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {} + HASHES: dict[str, str] = json.loads(hashes_path.read_text()) if hashes_path.exists() else {} found_stems = find_stems(top_levels, suffixes={"clsp": clsp_suffix}) hash_stems = [] From 658d3562a99d55e4f2b58356e11d8d3f11ad86a1 Mon Sep 17 00:00:00 2001 From: William Blanke Date: Wed, 16 Oct 2024 18:12:37 -0700 Subject: [PATCH 43/69] fix bluebox --- chia/timelord/timelord.py | 20 ++++++++++++-- chia/util/vdf_prover.py | 1 + poetry.lock | 58 +++++++++++++++++++-------------------- pyproject.toml | 2 +- 4 files changed, 49 insertions(+), 32 deletions(-) diff --git a/chia/timelord/timelord.py b/chia/timelord/timelord.py index 462f4783ca8b..4fd5a6c126cc 100644 --- a/chia/timelord/timelord.py +++ b/chia/timelord/timelord.py @@ -7,12 +7,13 @@ import logging import os import random +import tempfile import time import traceback from collections.abc import AsyncIterator from concurrent.futures import ThreadPoolExecutor from pathlib import Path -from typing import TYPE_CHECKING, Any, ClassVar, Optional, cast +from typing import IO, TYPE_CHECKING, Any, ClassVar, Optional, cast from chiavdf import create_discriminant, prove @@ -53,7 +54,7 @@ class BlueboxProcessData(Streamable): iters: uint64 -def prove_bluebox_slow(payload: bytes) -> bytes: +def prove_bluebox_slow(payload: bytes, executor_shutdown_tempfile_name: str) -> bytes: bluebox_process_data = BlueboxProcessData.from_bytes(payload) initial_el = b"\x08" + (b"\x00" * 99) return cast( @@ -63,10 +64,15 @@ def prove_bluebox_slow(payload: bytes) -> bytes: initial_el, 
bluebox_process_data.size_bits, bluebox_process_data.iters, + executor_shutdown_tempfile_name, ), ) +def _create_shutdown_file() -> IO[bytes]: + return tempfile.NamedTemporaryFile(prefix="chia_timelord_executor_shutdown_trigger") + + class Timelord: if TYPE_CHECKING: from chia.rpc.rpc_server import RpcServiceProtocol @@ -138,6 +144,7 @@ def __init__(self, root_path: Path, config: dict[str, Any], constants: Consensus self.pending_bluebox_info: list[tuple[float, timelord_protocol.RequestCompactProofOfTime]] = [] self.last_active_time = time.time() self.max_allowed_inactivity_time = 60 + self._executor_shutdown_tempfile: Optional[IO[bytes]] = None self.bluebox_pool: Optional[ThreadPoolExecutor] = None @contextlib.asynccontextmanager @@ -156,6 +163,7 @@ async def manage(self) -> AsyncIterator[None]: if os.name == "nt" or slow_bluebox: # `vdf_client` doesn't build on windows, use `prove()` from chiavdf. workers = self.config.get("slow_bluebox_process_count", 1) + self._executor_shutdown_tempfile = _create_shutdown_file() self.bluebox_pool = ThreadPoolExecutor( max_workers=workers, ) @@ -169,6 +177,8 @@ async def manage(self) -> AsyncIterator[None]: yield finally: self._shut_down = True + if self._executor_shutdown_tempfile is not None: + self._executor_shutdown_tempfile.close() for task in self.process_communication_tasks: task.cancel() if self.main_loop is not None: @@ -1168,6 +1178,7 @@ async def _manage_discriminant_queue_sanitizer_slow(self, pool: ThreadPoolExecut pool, prove_bluebox_slow, bytes(bluebox_process_data), + "" if self._executor_shutdown_tempfile is None else self._executor_shutdown_tempfile.name, ) t2 = time.time() delta = t2 - t1 @@ -1175,6 +1186,11 @@ async def _manage_discriminant_queue_sanitizer_slow(self, pool: ThreadPoolExecut ips = picked_info.new_proof_of_time.number_of_iterations / delta else: ips = 0 + + if len(proof) == 0: + log.info(f"Empty VDF proof returned: {picked_info.height}. Time: {delta}s. IPS: {ips}.") + return + log.info(f"Finished compact proof: {picked_info.height}. Time: {delta}s. IPS: {ips}.") output = proof[:100] proof_part = proof[100:200] diff --git a/chia/util/vdf_prover.py b/chia/util/vdf_prover.py index 3086733115ce..2ac23cf675af 100644 --- a/chia/util/vdf_prover.py +++ b/chia/util/vdf_prover.py @@ -22,6 +22,7 @@ def get_vdf_info_and_proof( vdf_input.data, constants.DISCRIMINANT_SIZE_BITS, number_iters, + "", ) output = ClassgroupElement.create(result[:form_size]) diff --git a/poetry.lock b/poetry.lock index a9c98fc50c84..9eece0490bb0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -930,37 +930,37 @@ files = [ [[package]] name = "chiavdf" -version = "1.1.6" +version = "1.1.8" description = "Chia vdf verification (wraps C++)" optional = false python-versions = ">=3.8" files = [ - {file = "chiavdf-1.1.6-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:0e7c7a0032d14ef11ed12bb6144437d4057d1c2ce435e1da7165659422e8e486"}, - {file = "chiavdf-1.1.6-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:728fe9fa328e134f9b08c46d4e535e6d24e55a0fbbf98c1008a32d63b22e1a3b"}, - {file = "chiavdf-1.1.6-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:34b682795f5c1348cf6b95fb60acf69649a7bd9fac8b890c9cecff8654798f36"}, - {file = "chiavdf-1.1.6-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:89eb391a43ee804bf410a76748d5a725fdb18989e17e9dffec4de5f57413c0f4"}, - {file = "chiavdf-1.1.6-cp310-cp310-win_amd64.whl", hash = "sha256:ca57ceb1e0410bcde5d7b6fdcfa1d9a5b05fb0c6e6d78d6a6cc6df6518eb6e09"}, - {file = "chiavdf-1.1.6-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:a1b109417191071590f36268bd8f7c633b708f023dfe52372756ee3ef9f2466a"}, - {file = "chiavdf-1.1.6-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:c7766e94c84fae64b95e4af16c63a9a44a3e9ba382f896ff268048e40be8f9f6"}, - {file = "chiavdf-1.1.6-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:8ac0719cd64b22895121fdbc5a3497ce96ef7e5ba88b0d57c4a6146114a80c11"}, - {file = "chiavdf-1.1.6-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:b3b3fd2631b3e3b795b14d1d6143bad6aa73ca6f8cd67824da551a9a8ba95435"}, - {file = "chiavdf-1.1.6-cp311-cp311-win_amd64.whl", hash = "sha256:c517489d01b7fe775f7230aebea57cfdd2257300b5855c27fb39b5818f912138"}, - {file = "chiavdf-1.1.6-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:3f0c662d45aa99a1121ac4b79588f328bdd88fe9739d06785a5a18454bb16388"}, - {file = "chiavdf-1.1.6-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:523125900b3909aeaeca11e4fe3406316f1f7b00f5323f60035bdece7c27d247"}, - {file = "chiavdf-1.1.6-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:481457538da7f71e46f7823e1f092a4768cf379e06d2b29e0e2fa167045b5ce6"}, - {file = "chiavdf-1.1.6-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:0a4ec91e53729c966f6fb43c63e3355dc585dd9c00d020176d214f86afa0af87"}, - {file = "chiavdf-1.1.6-cp312-cp312-win_amd64.whl", hash = "sha256:2db5542a7e11af42a03c63709e1e71ac119b25f694cae640e450369deee32003"}, - {file = "chiavdf-1.1.6-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:d30c6ef55d8bbccda0fc96fdca295acb47673fb729287e58691c5da2248ce264"}, - {file = "chiavdf-1.1.6-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:978311d09e07bbd0c807fd8dee8d243a01b8f9b6bebe909b5a33a75a6e6fd244"}, - {file = "chiavdf-1.1.6-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:de4d4e5123724b23259bb3fbc9d89e8e225e129e518b3325446b994624bfd880"}, - {file = "chiavdf-1.1.6-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9f32049e97b407bc5e7e2536c91589026272a4c608fb0f22dd4e8e982fa740c8"}, - {file = "chiavdf-1.1.6-cp38-cp38-win_amd64.whl", hash = "sha256:88a752a9f3b4cc7cfec517af0b74eee15581474d6f27c4f21cd468ba1a29878d"}, - {file = "chiavdf-1.1.6-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:9b7f6cd025cc71128f0a467d07eb1ea0b76a074892a50ae76c2094fc8deb93d4"}, - {file = "chiavdf-1.1.6-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:cbdd824114d28e4c0c17ba1e14492b04f440b7cf6697ad582d541b9f7e01e79b"}, - {file = "chiavdf-1.1.6-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:183f8380170ac749d2b479172394118d2536b0a4d02ef56c0e630d22d545e7a3"}, - {file = 
"chiavdf-1.1.6-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:612518b22da3339d2a1f42711d53d4f0353c21aad1683ad8b86c5ef0e2e49871"}, - {file = "chiavdf-1.1.6-cp39-cp39-win_amd64.whl", hash = "sha256:5cc41e58f751ed156f475905d8d4415e6f8285ce3ee64127496325ea62af20c2"}, - {file = "chiavdf-1.1.6.tar.gz", hash = "sha256:bf32ad4f114db49c9839ff18b7fc704582e162923780751420838830cd92bac6"}, + {file = "chiavdf-1.1.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:9cff805d701dacb5c0a715e64b6cc83b547c7d0b4b30ad2387d72949714c7084"}, + {file = "chiavdf-1.1.8-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:5661249abe84b89c3f6897e56230e13aaf4ff52497289daca391d48fe5a40fa3"}, + {file = "chiavdf-1.1.8-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5b89e98bd0417675dfb447deafe02ff3dcd9c756bb3d6e5627a4cae5698fcdc1"}, + {file = "chiavdf-1.1.8-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:d470e8a4ace49fcdb44bc90bd1b137dcbcbdc983154105725f760306295b1089"}, + {file = "chiavdf-1.1.8-cp310-cp310-win_amd64.whl", hash = "sha256:bb04bc702bab305f5b9c4e4769ae4e9f2c424733c88ff3e455f806d663698a6c"}, + {file = "chiavdf-1.1.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:d12cd8c6e1bd30711aafa9a08cef2841c819821ae833a1bb56ccce16852f2992"}, + {file = "chiavdf-1.1.8-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:c109871a72c4110ae7257f68e1d71b9b64bd4bd296a0d10aa9370883aefaa79e"}, + {file = "chiavdf-1.1.8-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dc37331ca8facfd994522fe47a4e17f96d3a0eead13d5ccb4564c06362bed3ef"}, + {file = "chiavdf-1.1.8-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76c616dd2d1fc7d3edc81eeefb304b63e74b3d19187e422351af2ca88cf719ad"}, + {file = "chiavdf-1.1.8-cp311-cp311-win_amd64.whl", hash = "sha256:da4b65abd16fb49aeca7fb49fd5f3eb7b307419af80ac675e59eff0d9343a6a2"}, + {file = "chiavdf-1.1.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:b589e0cd6b4746bcb698ca4247565c1a918fef9a6186d4db8adc17f9470e683a"}, + {file = "chiavdf-1.1.8-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:7027eea1f45a5ededfbcc472e2592e4730de60e8191b6c8661acc8a8bfeba9ed"}, + {file = "chiavdf-1.1.8-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:553251b254fdf5e174a494383102e9c4f894cfeabe46d0197107f3674c182a6a"}, + {file = "chiavdf-1.1.8-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:af6a1b6f491d76f305badf8cef6c3bfe70853198b2ce29f1a643e81f64615d75"}, + {file = "chiavdf-1.1.8-cp312-cp312-win_amd64.whl", hash = "sha256:362b7633e46001857733706431002cfefe0bcc402f2cf0c913d1a6e6fb691aca"}, + {file = "chiavdf-1.1.8-cp38-cp38-macosx_13_0_arm64.whl", hash = "sha256:c258c0ee6298494bf03ccfb7b992191e2cf210c247eace66ca6792d6cf73ead8"}, + {file = "chiavdf-1.1.8-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:366ea2ad88ba96eadc62b4aa7be1a6925f832ad2da3b366e52757f0a66b78424"}, + {file = "chiavdf-1.1.8-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:4efae12d61bd8efaa5f6c062c3373bd5617236f4e8347732228b4281284f61c9"}, + {file = "chiavdf-1.1.8-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:585ed4e00deb2f5e16aff10b37e08fd23117423cac0bd12eb9e35e787aaea744"}, + {file = "chiavdf-1.1.8-cp38-cp38-win_amd64.whl", hash = "sha256:13258a8828c4169d800dd13946e7ec7748954fb557d5bac7bad84642e900d144"}, + {file = "chiavdf-1.1.8-cp39-cp39-macosx_13_0_arm64.whl", hash = "sha256:a7c2631edc1836c579b88e600cdd930ae9d2cc0bb02c368a0818d1bc9e571d59"}, + {file = "chiavdf-1.1.8-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:65bf866200d899b5843bd0a5fe0d63acb6f33075d151fa0d82c202c012b5482d"}, + 
{file = "chiavdf-1.1.8-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:b7d405428c429666bead8e27e563a95f0d3020a10c7ee2f3641ef61da3463405"}, + {file = "chiavdf-1.1.8-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:76ca77f7cbca2f53d0d8f42531df7dfce1aa01235e5e278b102693d5e2dda39e"}, + {file = "chiavdf-1.1.8-cp39-cp39-win_amd64.whl", hash = "sha256:af76c53e6819dfb9d00388111e074fbc8ce0fa408d4dd9cafcd444adb7962cdf"}, + {file = "chiavdf-1.1.8.tar.gz", hash = "sha256:e4c6904953d44b6097cbdce1621821c5eba1c47ad120c6ffbbc24c5815eff33f"}, ] [[package]] @@ -3438,4 +3438,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "f5fa932e4e2facb84efb24d244d477db40be0eb4307378f3bea7355c5ccbb2e0" +content-hash = "db54f56de18ee245440aea61e7a96bdbc73190327b1dbd69f7569ff57abd0cc0" diff --git a/pyproject.toml b/pyproject.toml index f8e80ab793f4..da1dec873351 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space chia_rs = "0.15.0" -chiavdf = "1.1.6" # timelord and vdf verification +chiavdf = "1.1.8" # timelord and vdf verification click = "8.1.7" # For the CLI clvm = "0.9.10" clvm_tools = "0.4.9" # Currying Program.to other conveniences From f69233aa088db882bf70205ffc0588c324d6497f Mon Sep 17 00:00:00 2001 From: Richard Kiss Date: Thu, 17 Oct 2024 14:25:05 -0700 Subject: [PATCH 44/69] Move some files closer to where they should live. (#18465) * Move to `chia.full_node.util.safe_cancel_task` * Move to `chia/wallet/util/pprint.py` * Separate out `IPAddress` into `ip_address.py` --- chia/_tests/util/test_network.py | 3 +- chia/_tests/util/test_pprint.py | 2 +- chia/cmds/check_wallet_db.py | 2 +- chia/full_node/full_node.py | 2 +- chia/full_node/util/__init__.py | 0 chia/{ => full_node}/util/safe_cancel_task.py | 0 chia/server/node_discovery.py | 3 +- chia/types/peer_info.py | 2 +- chia/util/ip_address.py | 39 +++++++++++++++++++ chia/util/network.py | 37 +----------------- chia/{ => wallet}/util/pprint.py | 0 11 files changed, 49 insertions(+), 41 deletions(-) create mode 100644 chia/full_node/util/__init__.py rename chia/{ => full_node}/util/safe_cancel_task.py (100%) create mode 100644 chia/util/ip_address.py rename chia/{ => wallet}/util/pprint.py (100%) diff --git a/chia/_tests/util/test_network.py b/chia/_tests/util/test_network.py index 804920b4796a..9d63459dd95c 100644 --- a/chia/_tests/util/test_network.py +++ b/chia/_tests/util/test_network.py @@ -7,7 +7,8 @@ import pytest -from chia.util.network import IPAddress, resolve +from chia.util.ip_address import IPAddress +from chia.util.network import resolve @pytest.mark.anyio diff --git a/chia/_tests/util/test_pprint.py b/chia/_tests/util/test_pprint.py index 369a774ef74a..59160a3f423a 100644 --- a/chia/_tests/util/test_pprint.py +++ b/chia/_tests/util/test_pprint.py @@ -1,6 +1,6 @@ from __future__ import annotations -from chia.util.pprint import print_compact_ranges +from chia.wallet.util.pprint import print_compact_ranges def test_print_compact_ranges() -> None: diff --git a/chia/cmds/check_wallet_db.py b/chia/cmds/check_wallet_db.py index f0f0a4523407..6d7e919ff41f 100644 --- a/chia/cmds/check_wallet_db.py +++ b/chia/cmds/check_wallet_db.py @@ -11,7 +11,7 @@ from chia.util.collection import find_duplicates from chia.util.db_synchronous import db_synchronous_on from chia.util.db_wrapper import DBWrapper2, execute_fetchone -from chia.util.pprint import 
print_compact_ranges
+from chia.wallet.util.pprint import print_compact_ranges
 from chia.wallet.util.wallet_types import WalletType

 # TODO: Check for missing paired wallets (eg. No DID wallet for an NFT)
diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py
index 6469b0c71adb..615d8eebb7a6 100644
--- a/chia/full_node/full_node.py
+++ b/chia/full_node/full_node.py
@@ -42,6 +42,7 @@ from chia.full_node.subscriptions import PeerSubscriptions, peers_for_spend_bundle
 from chia.full_node.sync_store import Peak, SyncStore
 from chia.full_node.tx_processing_queue import TransactionQueue
+from chia.full_node.util.safe_cancel_task import cancel_task_safe
 from chia.full_node.weight_proof import WeightProofHandler
 from chia.protocols import farmer_protocol, full_node_protocol, timelord_protocol, wallet_protocol
 from chia.protocols.farmer_protocol import SignagePointSourceData, SPSubSlotSourceData, SPVDFSourceData
@@ -85,7 +86,6 @@ from chia.util.log_exceptions import log_exceptions
 from chia.util.path import path_from_root
 from chia.util.profiler import enable_profiler, mem_profile_task, profile_task
-from chia.util.safe_cancel_task import cancel_task_safe

 # This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
diff --git a/chia/full_node/util/__init__.py b/chia/full_node/util/__init__.py
new file mode 100644
index 000000000000..e69de29bb2d1
diff --git a/chia/util/safe_cancel_task.py b/chia/full_node/util/safe_cancel_task.py
similarity index 100%
rename from chia/util/safe_cancel_task.py
rename to chia/full_node/util/safe_cancel_task.py
diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py
index e6269e348894..31f2d204386f 100644
--- a/chia/server/node_discovery.py
+++ b/chia/server/node_discovery.py
@@ -24,7 +24,8 @@ from chia.types.peer_info import PeerInfo, TimestampedPeerInfo, UnresolvedPeerInfo
 from chia.util.hash import std_hash
 from chia.util.ints import uint16, uint64
-from chia.util.network import IPAddress, resolve
+from chia.util.ip_address import IPAddress
+from chia.util.network import resolve

 MAX_PEERS_RECEIVED_PER_REQUEST = 1000
 MAX_TOTAL_PEERS_RECEIVED = 3000
diff --git a/chia/types/peer_info.py b/chia/types/peer_info.py
index 927e89d6b53e..16ccf709ab54 100644
--- a/chia/types/peer_info.py
+++ b/chia/types/peer_info.py
@@ -5,7 +5,7 @@ from typing import Union

 from chia.util.ints import uint16, uint64
-from chia.util.network import IPAddress
+from chia.util.ip_address import IPAddress
 from chia.util.streamable import Streamable, streamable
diff --git a/chia/util/ip_address.py b/chia/util/ip_address.py
new file mode 100644
index 000000000000..3ab02eca3b1e
--- /dev/null
+++ b/chia/util/ip_address.py
@@ -0,0 +1,39 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from ipaddress import IPv4Address, IPv6Address, ip_address
+from typing import Union
+
+
+@dataclass(frozen=True)
+class IPAddress:
+    _inner: Union[IPv4Address, IPv6Address]
+
+    @classmethod
+    def create(cls, ip: str) -> IPAddress:
+        return cls(ip_address(ip))
+
+    def __int__(self) -> int:
+        return int(self._inner)
+
+    def __str__(self) -> str:
+        return str(self._inner)
+
+    def __repr__(self) -> str:
+        return repr(self._inner)
+
+    @property
+    def packed(self) -> bytes:
+        return self._inner.packed
+
+    @property
+    def is_private(self) -> bool:
+        return self._inner.is_private
+
+    @property
+    def is_v4(self) -> bool:
+        return self._inner.version == 4
+
+    @property
+    def is_v6(self) -> bool:
+        return self._inner.version == 6
diff --git a/chia/util/network.py b/chia/util/network.py
index cd3519223999..aaba02b33ff8 100644
--- a/chia/util/network.py
+++ b/chia/util/network.py
@@ -7,7 +7,7 @@ import ssl
 from collections.abc import Iterable
 from dataclasses import dataclass
-from ipaddress import IPv4Address, IPv4Network, IPv6Address, IPv6Network, ip_address
+from ipaddress import IPv4Network, IPv6Network, ip_address
 from typing import Any, Literal, Optional, Union

 from aiohttp import web
@@ -17,40 +17,7 @@ from chia.server.outbound_message import NodeType
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.util.ints import uint16
-
-
-@dataclass(frozen=True)
-class IPAddress:
-    _inner: Union[IPv4Address, IPv6Address]
-
-    @classmethod
-    def create(cls, ip: str) -> IPAddress:
-        return cls(ip_address(ip))
-
-    def __int__(self) -> int:
-        return int(self._inner)
-
-    def __str__(self) -> str:
-        return str(self._inner)
-
-    def __repr__(self) -> str:
-        return repr(self._inner)
-
-    @property
-    def packed(self) -> bytes:
-        return self._inner.packed
-
-    @property
-    def is_private(self) -> bool:
-        return self._inner.is_private
-
-    @property
-    def is_v4(self) -> bool:
-        return self._inner.version == 4
-
-    @property
-    def is_v6(self) -> bool:
-        return self._inner.version == 6
+from chia.util.ip_address import IPAddress


 @final
diff --git a/chia/util/pprint.py b/chia/wallet/util/pprint.py
similarity index 100%
rename from chia/util/pprint.py
rename to chia/wallet/util/pprint.py

From bbe507744b2dbb5aeb580a762f41427032b751b5 Mon Sep 17 00:00:00 2001
From: Arvid Norberg
Date: Fri, 18 Oct 2024 18:15:16 +0200
Subject: [PATCH 45/69] [CHIA-1561] validate UnfinishedBlocks and signature in thread pool (#18730)

validate UnfinishedBlocks and their signatures in the blockchain thread pool. Previously we validated unfinished block signatures in the main thread, because the BLSCache used to be single-threaded.
---
 chia/consensus/blockchain.py              | 21 +-------
 chia/consensus/multiprocess_validation.py | 33 +-----------
 chia/full_node/full_node.py               | 65 ++++++++++++++---------
 chia/full_node/full_node_api.py           |  7 +--
 4 files changed, 46 insertions(+), 80 deletions(-)

diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py
index b927a2013e4f..4152b191d789 100644
--- a/chia/consensus/blockchain.py
+++ b/chia/consensus/blockchain.py
@@ -22,7 +22,7 @@ from chia.consensus.find_fork_point import lookup_fork_chain
 from chia.consensus.full_block_to_block_record import block_to_block_record
 from chia.consensus.get_block_generator import get_block_generator
-from chia.consensus.multiprocess_validation import PreValidationResult, _run_generator
+from chia.consensus.multiprocess_validation import PreValidationResult
 from chia.full_node.block_height_map import BlockHeightMap
 from chia.full_node.block_store import BlockStore
 from chia.full_node.coin_store import CoinStore
@@ -40,7 +40,7 @@ from chia.types.unfinished_header_block import UnfinishedHeaderBlock
 from chia.types.weight_proof import SubEpochChallengeSegment
 from chia.util.cpu import available_logical_cores
-from chia.util.errors import ConsensusError, Err
+from chia.util.errors import Err
 from chia.util.generator_tools import get_block_header
 from chia.util.hash import std_hash
 from chia.util.inline_executor import InlineExecutor
@@ -784,23 +784,6 @@ async def validate_unfinished_block(

         return PreValidationResult(None, required_iters, cost_result, False, uint32(0))

-    async def run_generator(self, unfinished_block: bytes, generator: BlockGenerator, height: uint32) -> NPCResult:
-        task = asyncio.get_running_loop().run_in_executor(
-            self.pool,
-            _run_generator,
-            self.constants,
-            unfinished_block,
-            bytes(generator),
-            height,
-        )
-        npc_result_bytes = await task
-        if npc_result_bytes is None:
-            raise ConsensusError(Err.UNKNOWN)
-        ret: NPCResult = NPCResult.from_bytes(npc_result_bytes)
-        if ret.error is not None:
-            raise ConsensusError(Err(ret.error))
-        return ret
-
     def contains_block(self, header_hash: bytes32) -> bool:
         """
         True if we have already added this block to the chain. This may return false for orphan blocks
diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py
index 5f0f7393c63d..a4a1e542bc8b 100644
--- a/chia/consensus/multiprocess_validation.py
+++ b/chia/consensus/multiprocess_validation.py
@@ -16,7 +16,6 @@ from chia.consensus.block_record import BlockRecord
 from chia.consensus.blockchain_interface import BlockRecordsProtocol, BlocksProtocol
 from chia.consensus.constants import ConsensusConstants
-from chia.consensus.cost_calculator import NPCResult
 from chia.consensus.full_block_to_block_record import block_to_block_record
 from chia.consensus.get_block_challenge import get_block_challenge
 from chia.consensus.get_block_generator import get_block_generator
@@ -28,11 +27,10 @@ from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
 from chia.types.full_block import FullBlock
 from chia.types.generator_types import BlockGenerator
-from chia.types.unfinished_block import UnfinishedBlock
 from chia.types.validation_state import ValidationState
 from chia.util.augmented_chain import AugmentedBlockchain
 from chia.util.condition_tools import pkm_pairs
-from chia.util.errors import Err, ValidationError
+from chia.util.errors import Err
 from chia.util.generator_tools import get_block_header, tx_removals_and_additions
 from chia.util.ints import uint16, uint32, uint64
 from chia.util.streamable import Streamable, streamable
@@ -256,32 +254,3 @@ async def pre_validate_blocks_multiprocessing

     # Collect all results into one flat list
     return list(await asyncio.gather(*futures))
-
-
-def _run_generator(
-    constants: ConsensusConstants,
-    unfinished_block_bytes: bytes,
-    block_generator_bytes: bytes,
-    height: uint32,
-) -> Optional[bytes]:
-    """
-    Runs the CLVM generator from bytes inputs. This is meant to be called under a ProcessPoolExecutor, in order to
-    validate the heavy parts of a block (clvm program) in a different process.
-    """
-    try:
-        unfinished_block: UnfinishedBlock = UnfinishedBlock.from_bytes(unfinished_block_bytes)
-        assert unfinished_block.transactions_info is not None
-        block_generator: BlockGenerator = BlockGenerator.from_bytes(block_generator_bytes)
-        assert block_generator.program == unfinished_block.transactions_generator
-        npc_result: NPCResult = get_name_puzzle_conditions(
-            block_generator,
-            min(constants.MAX_BLOCK_COST_CLVM, unfinished_block.transactions_info.cost),
-            mempool_mode=False,
-            height=height,
-            constants=constants,
-        )
-        return bytes(npc_result)
-    except ValidationError as e:
-        return bytes(NPCResult(uint16(e.code.value), None))
-    except Exception:
-        return bytes(NPCResult(uint16(Err.UNKNOWN.value), None))
diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py
index 615d8eebb7a6..f30d45460a1a 100644
--- a/chia/full_node/full_node.py
+++ b/chia/full_node/full_node.py
@@ -15,7 +15,13 @@ from pathlib import Path
 from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final

-from chia_rs import AugSchemeMPL, BLSCache
+from chia_rs import (
+    AugSchemeMPL,
+    BLSCache,
+    get_flags_for_height_and_constants,
+    run_block_generator,
+    run_block_generator2,
+)
 from packaging.version import Version

 from chia.consensus.block_body_validation import ForkInfo
@@ -26,7 +32,6 @@ from chia.consensus.constants import ConsensusConstants
 from chia.consensus.cost_calculator import NPCResult
 from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
-from chia.consensus.get_block_generator import get_block_generator
 from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary
 from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing
 from chia.consensus.pot_iterations import calculate_sp_iters
@@ -63,7 +68,6 @@ from chia.types.coin_record import CoinRecord
 from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
 from chia.types.full_block import FullBlock
-from chia.types.generator_types import BlockGenerator
 from chia.types.header_block import HeaderBlock
 from chia.types.mempool_inclusion_status import MempoolInclusionStatus
 from chia.types.mempool_item import MempoolItem
@@ -75,7 +79,6 @@ from chia.types.validation_state import ValidationState
 from chia.types.weight_proof import WeightProof
 from chia.util.bech32m import encode_puzzle_hash
 from chia.util.check_fork_next_block import check_fork_next_block
-from chia.util.condition_tools import pkm_pairs
 from chia.util.config import process_config_start_method
 from chia.util.db_synchronous import db_synchronous_on
 from chia.util.db_version import lookup_db_version, set_db_version_async
@@ -2001,7 +2004,6 @@ async def add_unfinished_block(
         block: UnfinishedBlock,
         peer: Optional[WSChiaConnection],
         farmed_block: bool = False,
-        block_bytes: Optional[bytes] = None,
     ) -> None:
         """
         We have received an unfinished block, either created by us, or from another peer.
@@ -2080,29 +2082,44 @@ async def add_unfinished_block(
         if block.transactions_generator is not None:
             pre_validation_start = time.monotonic()
             assert block.transactions_info is not None
-            try:
-                block_generator: Optional[BlockGenerator] = await get_block_generator(
-                    self.blockchain.lookup_block_generators, block
+            if len(block.transactions_generator_ref_list) > 0:
+                generator_refs = set(block.transactions_generator_ref_list)
+                generators: dict[uint32, bytes] = await self.blockchain.lookup_block_generators(
+                    block.prev_header_hash, generator_refs
                 )
-            except ValueError:
-                raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
-            if block_generator is None:
-                raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
-            if block_bytes is None:
-                block_bytes = bytes(block)
+                generator_args = [generators[height] for height in block.transactions_generator_ref_list]
+            else:
+                generator_args = []

             height = uint32(0) if prev_b is None else uint32(prev_b.height + 1)
-            npc_result = await self.blockchain.run_generator(block_bytes, block_generator, height)
-            pre_validation_time = time.monotonic() - pre_validation_start
+            flags = get_flags_for_height_and_constants(height, self.constants)

-            # blockchain.run_generator throws on errors, so npc_result is
-            # guaranteed to represent a successful run
-            assert npc_result.conds is not None
-            pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA)
-            if not self._bls_cache.aggregate_verify(
-                pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature
-            ):
-                raise ConsensusError(Err.BAD_AGGREGATE_SIGNATURE)
+            # on mainnet we won't receive unfinished blocks for heights
+            # below the hard fork activation, but we have tests where we do
+            if height >= self.constants.HARD_FORK_HEIGHT:
+                run_block = run_block_generator2
+            else:
+                run_block = run_block_generator
+
+            # run_block() also validates the signature
+            err, conditions = await asyncio.get_running_loop().run_in_executor(
+                self.blockchain.pool,
+                run_block,
+                bytes(block.transactions_generator),
+                generator_args,
+                min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
+                flags,
+                block.transactions_info.aggregated_signature,
+                self._bls_cache,
+                self.constants,
+            )
+
+            if err is not None:
+                raise ConsensusError(Err(err))
+            assert conditions is not None
+            assert conditions.validated_signature
+            npc_result = NPCResult(None, conditions)
+            pre_validation_time = time.monotonic() - pre_validation_start

         async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
             # TODO: pre-validate VDFs outside of lock
diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py
index 409410914899..ab0d8853a6a8 100644
--- a/chia/full_node/full_node_api.py
+++ b/chia/full_node/full_node_api.py
@@ -537,18 +537,15 @@ async def request_unfinished_block2(
             return msg
         return None

-    @api_request(peer_required=True, bytes_required=True)
+    @api_request(peer_required=True)
     async def respond_unfinished_block(
         self,
         respond_unfinished_block: full_node_protocol.RespondUnfinishedBlock,
         peer: WSChiaConnection,
-        respond_unfinished_block_bytes: bytes = b"",
     ) -> Optional[Message]:
         if self.full_node.sync_store.get_sync_mode():
             return None
-        await self.full_node.add_unfinished_block(
-            respond_unfinished_block.unfinished_block, peer, block_bytes=respond_unfinished_block_bytes
-        )
+        await self.full_node.add_unfinished_block(respond_unfinished_block.unfinished_block, peer)
         return None

     @api_request(peer_required=True)

From
6c2c13a2d0ee5a645175417ffcf975c4cc5f9a61 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Fri, 18 Oct 2024 18:40:49 +0100 Subject: [PATCH 46/69] CHIA-1626 Move some modules to the utils virtual project (#18692) Move some modules to the utils virtual project. --- chia/util/batches.py | 2 ++ chia/util/bech32m.py | 4 +++- chia/util/byte_types.py | 2 ++ chia/util/collection.py | 2 ++ chia/util/cpu.py | 2 ++ chia/util/db_synchronous.py | 2 ++ chia/util/db_version.py | 2 ++ chia/util/db_wrapper.py | 2 ++ chia/util/default_root.py | 2 ++ chia/util/errors.py | 2 ++ chia/util/file_keyring.py | 2 ++ chia/util/files.py | 2 ++ chia/util/hash.py | 4 +++- chia/util/inline_executor.py | 2 ++ chia/util/keychain.py | 6 ++++-- chia/util/keyring_wrapper.py | 2 ++ chia/util/limited_semaphore.py | 2 ++ chia/util/lock.py | 2 ++ chia/util/log_exceptions.py | 2 ++ chia/util/logging.py | 2 ++ chia/util/lru_cache.py | 2 ++ chia/util/math.py | 2 ++ chia/util/paginator.py | 2 ++ chia/util/path.py | 2 ++ chia/util/permissions.py | 2 ++ chia/util/priority_mutex.py | 2 ++ chia/util/profiler.py | 2 ++ chia/util/recursive_replace.py | 2 ++ chia/util/setproctitle.py | 2 ++ chia/util/significant_bits.py | 2 ++ chia/util/streamable.py | 6 ++++-- chia/util/task_timing.py | 2 ++ chia/util/timing.py | 2 ++ chia/util/virtual_project_analysis.py | 2 ++ 34 files changed, 74 insertions(+), 6 deletions(-) diff --git a/chia/util/batches.py b/chia/util/batches.py index 30a26b935561..17a20018867c 100644 --- a/chia/util/batches.py +++ b/chia/util/batches.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from collections.abc import Collection, Iterator diff --git a/chia/util/bech32m.py b/chia/util/bech32m.py index b35576c67d0f..66e3ff496a78 100644 --- a/chia/util/bech32m.py +++ b/chia/util/bech32m.py @@ -1,3 +1,5 @@ +# Package: utils + # Copyright (c) 2017 Pieter Wuille # # Permission is hereby granted, free of charge, to any person obtaining a copy @@ -23,7 +25,7 @@ from collections.abc import Iterable from typing import Optional -from chia.types.blockchain_format.sized_bytes import bytes32 +from chia_rs.sized_bytes import bytes32 # Based on this specification from Pieter Wuille: # https://github.com/sipa/bips/blob/bip-bech32m/bip-bech32m.mediawiki diff --git a/chia/util/byte_types.py b/chia/util/byte_types.py index 828e58014974..76864c5e4913 100644 --- a/chia/util/byte_types.py +++ b/chia/util/byte_types.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations diff --git a/chia/util/collection.py b/chia/util/collection.py index 80bae054a028..b57d46e22c43 100644 --- a/chia/util/collection.py +++ b/chia/util/collection.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations # Utility Functions for Collections & Sequences diff --git a/chia/util/cpu.py b/chia/util/cpu.py index e41d2b713781..3d7d32207234 100644 --- a/chia/util/cpu.py +++ b/chia/util/cpu.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import os diff --git a/chia/util/db_synchronous.py b/chia/util/db_synchronous.py index 832194c5ac0c..78c484248dba 100644 --- a/chia/util/db_synchronous.py +++ b/chia/util/db_synchronous.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations diff --git a/chia/util/db_version.py b/chia/util/db_version.py index f2dc98b3fc4f..448c7ac29636 100644 --- a/chia/util/db_version.py +++ b/chia/util/db_version.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import sqlite3 diff --git a/chia/util/db_wrapper.py 
b/chia/util/db_wrapper.py index d6592162297c..4453fc4d747e 100644 --- a/chia/util/db_wrapper.py +++ b/chia/util/db_wrapper.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/default_root.py b/chia/util/default_root.py index 1fbffa51497c..0ef024c7f695 100644 --- a/chia/util/default_root.py +++ b/chia/util/default_root.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import os diff --git a/chia/util/errors.py b/chia/util/errors.py index 87defc2ef15c..2f832363bb64 100644 --- a/chia/util/errors.py +++ b/chia/util/errors.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from enum import Enum diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py index efdaa69c8b8c..a5fbfdedbda1 100644 --- a/chia/util/file_keyring.py +++ b/chia/util/file_keyring.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import base64 diff --git a/chia/util/files.py b/chia/util/files.py index e25b3fb8cf5b..2577d5a2158a 100644 --- a/chia/util/files.py +++ b/chia/util/files.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/hash.py b/chia/util/hash.py index ccd4abae3b8a..780dbc728f89 100644 --- a/chia/util/hash.py +++ b/chia/util/hash.py @@ -1,9 +1,11 @@ +# Package: utils + from __future__ import annotations from hashlib import sha256 from typing import Literal, SupportsBytes, Union, cast, overload -from chia.types.blockchain_format.sized_bytes import bytes32 +from chia_rs.sized_bytes import bytes32 @overload diff --git a/chia/util/inline_executor.py b/chia/util/inline_executor.py index 499671bb81aa..daa12f7f37f2 100644 --- a/chia/util/inline_executor.py +++ b/chia/util/inline_executor.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from concurrent.futures import Executor, Future diff --git a/chia/util/keychain.py b/chia/util/keychain.py index a72321b2b900..0b835731dd10 100644 --- a/chia/util/keychain.py +++ b/chia/util/keychain.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import sys @@ -11,9 +13,10 @@ import importlib_resources from bitstring import BitArray # pyright: reportMissingImports=false from chia_rs import AugSchemeMPL, G1Element, PrivateKey # pyright: reportMissingImports=false +from chia_rs.sized_bytes import bytes32 +from chia_rs.sized_ints import uint32 from typing_extensions import final -from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import bech32_decode, convertbits from chia.util.byte_types import hexstr_to_bytes from chia.util.errors import ( @@ -27,7 +30,6 @@ ) from chia.util.file_keyring import Key from chia.util.hash import std_hash -from chia.util.ints import uint32 from chia.util.keyring_wrapper import KeyringWrapper from chia.util.streamable import Streamable, streamable diff --git a/chia/util/keyring_wrapper.py b/chia/util/keyring_wrapper.py index b65e90523bc4..89eb2d98e5ad 100644 --- a/chia/util/keyring_wrapper.py +++ b/chia/util/keyring_wrapper.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from pathlib import Path diff --git a/chia/util/limited_semaphore.py b/chia/util/limited_semaphore.py index 955b58561fd3..62b2ab0de110 100644 --- a/chia/util/limited_semaphore.py +++ b/chia/util/limited_semaphore.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/lock.py b/chia/util/lock.py index 53ed8dcfea28..771a03a67246 100644 
--- a/chia/util/lock.py +++ b/chia/util/lock.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from dataclasses import dataclass diff --git a/chia/util/log_exceptions.py b/chia/util/log_exceptions.py index 27d6802fbbab..a097376c358e 100644 --- a/chia/util/log_exceptions.py +++ b/chia/util/log_exceptions.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import logging diff --git a/chia/util/logging.py b/chia/util/logging.py index 07f6386322a9..eab283a110f8 100644 --- a/chia/util/logging.py +++ b/chia/util/logging.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import logging diff --git a/chia/util/lru_cache.py b/chia/util/lru_cache.py index 926e0f455221..143df0dcfce2 100644 --- a/chia/util/lru_cache.py +++ b/chia/util/lru_cache.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from collections import OrderedDict diff --git a/chia/util/math.py b/chia/util/math.py index 670818390537..fa556561d125 100644 --- a/chia/util/math.py +++ b/chia/util/math.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations diff --git a/chia/util/paginator.py b/chia/util/paginator.py index f673b1633bbe..d2aaa3785465 100644 --- a/chia/util/paginator.py +++ b/chia/util/paginator.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import dataclasses diff --git a/chia/util/path.py b/chia/util/path.py index 70589796b2d1..a12154e9b7aa 100644 --- a/chia/util/path.py +++ b/chia/util/path.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import os diff --git a/chia/util/permissions.py b/chia/util/permissions.py index 48812039e55f..8d9b8535cd74 100644 --- a/chia/util/permissions.py +++ b/chia/util/permissions.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import os diff --git a/chia/util/priority_mutex.py b/chia/util/priority_mutex.py index a3db80c25645..3cab8c457ca3 100644 --- a/chia/util/priority_mutex.py +++ b/chia/util/priority_mutex.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/profiler.py b/chia/util/profiler.py index 2069e57b7aef..a1020c0a57c5 100644 --- a/chia/util/profiler.py +++ b/chia/util/profiler.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/recursive_replace.py b/chia/util/recursive_replace.py index 21c735201c97..7914ec8587c7 100644 --- a/chia/util/recursive_replace.py +++ b/chia/util/recursive_replace.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations from dataclasses import replace diff --git a/chia/util/setproctitle.py b/chia/util/setproctitle.py index ef07a66c8ac2..be4a26b3f4ac 100644 --- a/chia/util/setproctitle.py +++ b/chia/util/setproctitle.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations try: diff --git a/chia/util/significant_bits.py b/chia/util/significant_bits.py index 580cca704dcc..02fcf42db2b4 100644 --- a/chia/util/significant_bits.py +++ b/chia/util/significant_bits.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 3589ef03fe0e..6ddde747f7d9 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import dataclasses @@ -9,12 +11,12 @@ from enum import Enum from typing import TYPE_CHECKING, Any, BinaryIO, Callable, ClassVar, Optional, TypeVar, Union, 
get_type_hints +from chia_rs.sized_bytes import bytes32 +from chia_rs.sized_ints import uint16, uint32, uint64 from typing_extensions import Literal, get_args, get_origin -from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.byte_types import hexstr_to_bytes from chia.util.hash import std_hash -from chia.util.ints import uint16, uint32, uint64 if TYPE_CHECKING: from _typeshed import DataclassInstance diff --git a/chia/util/task_timing.py b/chia/util/task_timing.py index ac0ea1fbcf27..3ebce35a6779 100644 --- a/chia/util/task_timing.py +++ b/chia/util/task_timing.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/timing.py b/chia/util/timing.py index 705fa059c16a..c6b9b75383be 100644 --- a/chia/util/timing.py +++ b/chia/util/timing.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import os diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py index b5bf8f33a603..756e4b1b25e9 100644 --- a/chia/util/virtual_project_analysis.py +++ b/chia/util/virtual_project_analysis.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import ast From 470ae0ff0291a337253609f17cb394370cbe1ffd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 10:41:00 -0700 Subject: [PATCH 47/69] build(deps): bump boto3 from 1.34.143 to 1.35.43 (#18732) Bumps [boto3](https://github.com/boto/boto3) from 1.34.143 to 1.35.43. - [Release notes](https://github.com/boto/boto3/releases) - [Commits](https://github.com/boto/boto3/compare/1.34.143...1.35.43) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 20 ++++++++++---------- pyproject.toml | 2 +- 2 files changed, 11 insertions(+), 11 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9eece0490bb0..75a78b4483de 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. [[package]] name = "aiofiles" @@ -517,17 +517,17 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "boto3" -version = "1.34.143" +version = "1.35.43" description = "The AWS SDK for Python" optional = false python-versions = ">=3.8" files = [ - {file = "boto3-1.34.143-py3-none-any.whl", hash = "sha256:0d16832f23e6bd3ae94e35ea8e625529850bfad9baccd426de96ad8f445d8e03"}, - {file = "boto3-1.34.143.tar.gz", hash = "sha256:b590ce80c65149194def43ebf0ea1cf0533945502507837389a8d22e3ecbcf05"}, + {file = "boto3-1.35.43-py3-none-any.whl", hash = "sha256:e6a50a0599f75b21de0de1a551a0564793d25b304fa623e4052e527b268de734"}, + {file = "boto3-1.35.43.tar.gz", hash = "sha256:0197f460632804577aa78b2f6daf7b823bffa9d4d67a5cebb179efff0fe9631b"}, ] [package.dependencies] -botocore = ">=1.34.143,<1.35.0" +botocore = ">=1.35.43,<1.36.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.10.0,<0.11.0" @@ -536,13 +536,13 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "botocore" -version = "1.34.143" +version = "1.35.43" description = "Low-level, data-driven core of boto 3." 
optional = false python-versions = ">=3.8" files = [ - {file = "botocore-1.34.143-py3-none-any.whl", hash = "sha256:094aea179e8aaa1bc957ad49cc27d93b189dd3a1f3075d8b0ca7c445a2a88430"}, - {file = "botocore-1.34.143.tar.gz", hash = "sha256:059f032ec05733a836e04e869c5a15534420102f93116f3bc9a5b759b0651caf"}, + {file = "botocore-1.35.43-py3-none-any.whl", hash = "sha256:7cfdee9117617da97daaf259dd8484bcdc259c59eb7d1ce7db9ecf8506b7d36c"}, + {file = "botocore-1.35.43.tar.gz", hash = "sha256:04539b85ade060601a3023cacb538fc17aad8c059a5a2e18fe4bc5d0d91fbd72"}, ] [package.dependencies] @@ -554,7 +554,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.20.11)"] +crt = ["awscrt (==0.22.0)"] [[package]] name = "build" @@ -3438,4 +3438,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "db54f56de18ee245440aea61e7a96bdbc73190327b1dbd69f7569ff57abd0cc0" +content-hash = "1596d4f11537d081c3e93faa9b05fb7f939556f84deeaf501e9b8b9b34d226cf" diff --git a/pyproject.toml b/pyproject.toml index da1dec873351..0316c09583df 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ aiohttp = "3.10.4" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks anyio = "4.3.0" bitstring = "4.1.4" # Binary data management library -boto3 = "1.34.143" # AWS S3 for Data Layer S3 plugin +boto3 = "1.35.43" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters chiapos = "2.0.4" # proof of space chia_rs = "0.15.0" From 9f63f969164eab2950345d55d1c766ee96f66200 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 08:50:39 -0700 Subject: [PATCH 48/69] build(deps): bump anyio from 4.3.0 to 4.6.2.post1 (#18723) Bumps [anyio](https://github.com/agronholm/anyio) from 4.3.0 to 4.6.2.post1. - [Release notes](https://github.com/agronholm/anyio/releases) - [Changelog](https://github.com/agronholm/anyio/blob/master/docs/versionhistory.rst) - [Commits](https://github.com/agronholm/anyio/compare/4.3.0...4.6.2.post1) --- updated-dependencies: - dependency-name: anyio dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 16 ++++++++-------- pyproject.toml | 2 +- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index 75a78b4483de..ecdc504371f2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -178,13 +178,13 @@ files = [ [[package]] name = "anyio" -version = "4.3.0" +version = "4.6.2.post1" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, - {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, + {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, + {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, ] [package.dependencies] @@ -194,9 +194,9 @@ sniffio = ">=1.1" typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] [[package]] name = "argon2-cffi" @@ -3438,4 +3438,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "1596d4f11537d081c3e93faa9b05fb7f939556f84deeaf501e9b8b9b34d226cf" +content-hash = "0f281ddf563f4449cc174eaba6363a2f69df94b576dd4d785169768b64bd192d" diff --git a/pyproject.toml b/pyproject.toml index 0316c09583df..57fc9205a973 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -41,7 +41,7 @@ python = ">=3.9, <3.13" aiofiles = "24.1.0" # Async IO for files aiohttp = "3.10.4" # HTTP server for full node rpc aiosqlite = "0.20.0" # asyncio wrapper for sqlite, to store blocks -anyio = "4.3.0" +anyio = "4.6.2.post1" bitstring = "4.1.4" # Binary data management library boto3 = "1.35.43" # AWS S3 for Data Layer S3 plugin chiabip158 = "1.5.1" # bip158-style wallet filters From df7f87cfee312b164cb6abcabe7dba9af0519166 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 22 Oct 2024 12:06:46 -0700 Subject: [PATCH 49/69] build(deps): bump diff-cover from 9.0.0 to 9.2.0 (#18577) Bumps [diff-cover](https://github.com/Bachmann1234/diff-cover) from 9.0.0 to 9.2.0. - [Release notes](https://github.com/Bachmann1234/diff-cover/releases) - [Changelog](https://github.com/Bachmann1234/diff_cover/blob/main/CHANGELOG) - [Commits](https://github.com/Bachmann1234/diff-cover/compare/v9.0.0...v9.2.0) --- updated-dependencies: - dependency-name: diff-cover dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index ecdc504371f2..a5b6c9b737ff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1224,13 +1224,13 @@ test-randomorder = ["pytest-randomly"] [[package]] name = "diff-cover" -version = "9.0.0" +version = "9.2.0" description = "Run coverage and linting reports on diffs" optional = true python-versions = "<4.0.0,>=3.8.10" files = [ - {file = "diff_cover-9.0.0-py3-none-any.whl", hash = "sha256:31b308259b79e2cab5f30aff499a3ea3ba9475f0d495d82ba9b6caa7487bca03"}, - {file = "diff_cover-9.0.0.tar.gz", hash = "sha256:1dc851d3f3f320c048d03618e4c0d9861fa4a1506b425d2d09a564b20c95674a"}, + {file = "diff_cover-9.2.0-py3-none-any.whl", hash = "sha256:1e24edc51c39e810c47dd9986e76c333ed95859655c091f572e590c39cabbdbe"}, + {file = "diff_cover-9.2.0.tar.gz", hash = "sha256:85a0b353ebbb678f9e87ea303f75b545bd0baca38f563219bb72f2ae862bba36"}, ] [package.dependencies] @@ -3438,4 +3438,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "0f281ddf563f4449cc174eaba6363a2f69df94b576dd4d785169768b64bd192d" +content-hash = "2cb2d09f09cd70719d304f344edf40c50753d47d76510e1db035f5a90ee66559" diff --git a/pyproject.toml b/pyproject.toml index 57fc9205a973..5a916c8b8da1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ aiohttp_cors = { version = "0.7.0", optional = true } black = { version = "24.8.0", optional = true } build = { version = "1.2.1", optional = true } coverage = { version = "7.6.1", optional = true } -diff-cover = { version = "9.0.0", optional = true } +diff-cover = { version = "9.2.0", optional = true } flake8 = { version = "7.1.1", optional = true } isort = { version = "5.13.2", optional = true } # TODO: but... keyrings_cryptfile goes 15 minutes without locking while this does in 75 seconds From 3b6f547d45e1b4f8b96afd9213c80f1cf36e5fd8 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Tue, 22 Oct 2024 15:07:07 -0400 Subject: [PATCH 50/69] bytes zeros (#18475) * `ZERO_32` * `bytes(\d+)\(\[0\] \* \1\)` * `bytes(\d+)\(b"\\(x00|0)" \* \1\)` * `bytes(\d+)\(b"0" \* \1\)` * Revert "`bytes(\d+)\(b"0" \* \1\)`" This reverts commit 2d629dcb0a93b162272c17ce73e0fb577bc48f19. 
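The bullets above are the regex passes used for this mechanical refactor; the `ZERO_32` pass removed hand-written zero constants such as `ZERO_32 = bytes32([0] * 32)`, and the final `b"0"` pattern was reverted, presumably because `b"0" * 32` produces thirty-two ASCII `'0'` bytes (0x30) rather than zero bytes, so that rewrite would have changed values. As a rough illustration only (not part of this patch), an equivalent codemod could look like the sketch below, using Python's standard `re` and `pathlib`; the file scope and helper name are hypothetical:

```python
# Illustrative sketch of the codemod described above (not part of the patch).
# The b"0" rule is intentionally omitted, matching the revert above.
import re
from pathlib import Path

REWRITES = [
    # bytes32([0] * 32) -> bytes32.zeros (the \1 backreference keeps sizes consistent)
    (re.compile(r'bytes(\d+)\(\[0\] \* \1\)'), r'bytes\1.zeros'),
    # bytes32(b"\x00" * 32) and bytes32(b"\0" * 32) -> bytes32.zeros
    (re.compile(r'bytes(\d+)\(b"\\(?:x00|0)" \* \1\)'), r'bytes\1.zeros'),
]

def rewrite(path: Path) -> None:
    text = path.read_text()
    for pattern, replacement in REWRITES:
        text = pattern.sub(replacement, text)
    path.write_text(text)

for py_file in Path(".").rglob("*.py"):  # scope is illustrative
    rewrite(py_file)
```

Note that `bytes32.zeros` is referenced as an attribute rather than called, so the rewrite drops the parentheses along with the argument, as the diff below shows.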
--- benchmarks/block_store.py | 10 ++-- chia/_tests/blockchain/test_blockchain.py | 8 ++-- .../blockchain/test_get_block_generator.py | 6 +-- chia/_tests/clvm/test_curry_and_treehash.py | 2 +- chia/_tests/clvm/test_puzzle_compression.py | 3 +- chia/_tests/clvm/test_spend_sim.py | 10 ++-- chia/_tests/cmds/wallet/test_coins.py | 2 +- chia/_tests/cmds/wallet/test_dao.py | 6 +-- chia/_tests/cmds/wallet/test_nft.py | 2 +- chia/_tests/cmds/wallet/test_vcs.py | 2 +- .../_tests/core/data_layer/test_data_layer.py | 4 +- .../core/data_layer/test_data_layer_util.py | 4 +- chia/_tests/core/data_layer/test_data_rpc.py | 30 ++++++------ .../_tests/core/data_layer/test_data_store.py | 16 +++---- .../core/data_layer/test_data_store_schema.py | 14 +++--- .../core/full_node/test_block_height_map.py | 2 +- chia/_tests/core/ssl/test_ssl.py | 2 +- chia/_tests/core/test_crawler.py | 4 +- chia/_tests/core/test_full_node_rpc.py | 2 +- chia/_tests/core/test_merkle_set.py | 8 ++-- chia/_tests/core/util/test_block_cache.py | 2 +- chia/_tests/core/util/test_jsonify.py | 2 +- chia/_tests/core/util/test_streamable.py | 2 +- chia/_tests/plot_sync/test_plot_sync.py | 2 +- chia/_tests/pools/test_pool_rpc.py | 12 ++--- chia/_tests/simulation/test_simulation.py | 2 +- .../cat_wallet/test_cat_outer_puzzle.py | 2 +- .../wallet/cat_wallet/test_cat_wallet.py | 12 ++--- .../wallet/cat_wallet/test_offer_lifecycle.py | 2 +- chia/_tests/wallet/cat_wallet/test_trades.py | 6 +-- chia/_tests/wallet/conftest.py | 2 +- .../wallet/db_wallet/test_db_graftroot.py | 4 +- .../_tests/wallet/db_wallet/test_dl_wallet.py | 30 ++++++------ .../wallet/nft_wallet/test_nft_1_offers.py | 4 +- .../wallet/nft_wallet/test_nft_lifecycle.py | 6 +-- .../wallet/nft_wallet/test_nft_wallet.py | 4 +- .../nft_wallet/test_ownership_outer_puzzle.py | 2 +- chia/_tests/wallet/rpc/test_dl_wallet_rpc.py | 14 +++--- chia/_tests/wallet/rpc/test_wallet_rpc.py | 14 +++--- chia/_tests/wallet/sync/test_wallet_sync.py | 10 ++-- chia/_tests/wallet/test_clvm_streamable.py | 2 +- chia/_tests/wallet/test_conditions.py | 2 +- chia/_tests/wallet/test_debug_spend_bundle.py | 12 ++--- .../_tests/wallet/test_new_wallet_protocol.py | 14 +++--- chia/_tests/wallet/test_notifications.py | 2 +- chia/_tests/wallet/test_sign_coin_spends.py | 6 +-- chia/_tests/wallet/test_signer_protocol.py | 18 +++---- .../wallet/test_singleton_lifecycle_fast.py | 2 +- chia/_tests/wallet/test_transaction_store.py | 6 +-- chia/_tests/wallet/test_util.py | 48 +++++++++---------- chia/_tests/wallet/test_wallet.py | 14 +++--- .../_tests/wallet/test_wallet_action_scope.py | 4 +- chia/_tests/wallet/test_wallet_node.py | 4 +- .../wallet/test_wallet_state_manager.py | 4 +- chia/_tests/wallet/test_wallet_trade_store.py | 4 +- .../wallet/vc_wallet/test_cr_outer_puzzle.py | 4 +- .../wallet/vc_wallet/test_vc_lifecycle.py | 10 ++-- .../_tests/wallet/vc_wallet/test_vc_wallet.py | 6 +-- chia/_tests/wallet/wallet_block_tools.py | 18 +++---- chia/consensus/block_creation.py | 2 +- chia/consensus/block_header_validation.py | 4 +- chia/data_layer/data_layer.py | 6 +-- chia/data_layer/data_layer_wallet.py | 4 +- chia/data_layer/data_store.py | 10 ++-- chia/data_layer/download_data.py | 6 +-- chia/full_node/full_node.py | 2 +- chia/full_node/full_node_api.py | 2 +- chia/rpc/data_layer_rpc_api.py | 4 +- chia/simulator/block_tools.py | 2 +- chia/simulator/full_node_simulator.py | 2 +- chia/wallet/dao_wallet/dao_wallet.py | 2 +- chia/wallet/did_wallet/did_wallet.py | 4 +- chia/wallet/trading/offer.py | 7 ++- 
chia/wallet/vc_wallet/vc_drivers.py | 2 +- chia/wallet/vc_wallet/vc_wallet.py | 2 +- 75 files changed, 250 insertions(+), 262 deletions(-) diff --git a/benchmarks/block_store.py b/benchmarks/block_store.py index bccf69323617..8e60a1ba9944 100644 --- a/benchmarks/block_store.py +++ b/benchmarks/block_store.py @@ -46,8 +46,8 @@ async def run_add_block_benchmark(version: int) -> None: # keep track of benchmark total time all_test_time = 0.0 - prev_block = bytes32([0] * 32) - prev_ses_hash = bytes32([0] * 32) + prev_block = bytes32.zeros + prev_ses_hash = bytes32.zeros header_hashes = [] @@ -63,7 +63,7 @@ async def run_add_block_benchmark(version: int) -> None: sub_slot_iters = uint64(10) required_iters = uint64(100) transaction_block_counter = 0 - prev_transaction_block = bytes32([0] * 32) + prev_transaction_block = bytes32.zeros prev_transaction_height = uint32(0) total_time = 0.0 ses_counter = 0 @@ -134,7 +134,7 @@ async def run_add_block_benchmark(version: int) -> None: pool_target, rand_g2() if has_pool_pk else None, # pool_signature rand_hash(), # farmer_reward_puzzle_hash - bytes32([0] * 32), # extension_data + bytes32.zeros, # extension_data ) foliage = Foliage( @@ -208,7 +208,7 @@ async def run_add_block_benchmark(version: int) -> None: deficit == 16, prev_transaction_height, timestamp if is_transaction else None, - prev_transaction_block if prev_transaction_block != bytes32([0] * 32) else None, + prev_transaction_block if prev_transaction_block != bytes32.zeros else None, None if fees == 0 else fees, reward_claims_incorporated, finished_challenge_slot_hashes, diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index ac6516dab2cf..15678c603834 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -608,7 +608,7 @@ async def test_genesis_no_icc(self, empty_blockchain: Blockchain, bt: BlockTools "infused_challenge_chain", InfusedChallengeChainSubSlot( VDFInfo( - bytes32([0] * 32), + bytes32.zeros, uint64(1200), ClassgroupElement.get_default_element(), ) @@ -679,7 +679,7 @@ async def do_test_invalid_icc_sub_slot_vdf( block.finished_sub_slots[ -1 ].infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf.replace( - challenge=bytes32([0] * 32) + challenge=bytes32.zeros ) ), ) @@ -1101,7 +1101,7 @@ async def test_genesis_has_ses(self, empty_blockchain: Blockchain, bt: BlockTool recursive_replace( block.finished_sub_slots[0].challenge_chain, "subepoch_summary_hash", - bytes32([0] * 32), + bytes32.zeros, ), ) @@ -1136,7 +1136,7 @@ async def test_no_ses_if_no_se(self, empty_blockchain: Blockchain, bt: BlockTool recursive_replace( blocks[-1].finished_sub_slots[0].challenge_chain, "subepoch_summary_hash", - bytes32([0] * 32), + bytes32.zeros, ), ) diff --git a/chia/_tests/blockchain/test_get_block_generator.py b/chia/_tests/blockchain/test_get_block_generator.py index d1d57be6c3c5..748102b6d389 100644 --- a/chia/_tests/blockchain/test_get_block_generator.py +++ b/chia/_tests/blockchain/test_get_block_generator.py @@ -53,20 +53,20 @@ async def only_lookup_5(hh: bytes32, refs: set[uint32]) -> dict[uint32, bytes]: @pytest.mark.anyio async def test_failing_lookup() -> None: - br = BR(bytes32([0] * 32), DUMMY_PROGRAM, [uint32(1)]) + br = BR(bytes32.zeros, DUMMY_PROGRAM, [uint32(1)]) with pytest.raises(KeyError): await get_block_generator(zero_hits, br) @pytest.mark.anyio async def test_no_generator() -> None: - br = BR(bytes32([0] * 32), None, [uint32(1)]) + br = BR(bytes32.zeros, None, 
[uint32(1)]) with pytest.raises(AssertionError): await get_block_generator(zero_hits, br) @pytest.mark.anyio async def test_no_refs() -> None: - br = BR(bytes32([0] * 32), DUMMY_PROGRAM, []) + br = BR(bytes32.zeros, DUMMY_PROGRAM, []) bg = await get_block_generator(never_called, br) assert bg == BlockGenerator(DUMMY_PROGRAM, []) diff --git a/chia/_tests/clvm/test_curry_and_treehash.py b/chia/_tests/clvm/test_curry_and_treehash.py index fc2595ba9fb4..deae36bf6952 100644 --- a/chia/_tests/clvm/test_curry_and_treehash.py +++ b/chia/_tests/clvm/test_curry_and_treehash.py @@ -33,7 +33,7 @@ def test_curry_and_treehash() -> None: @pytest.mark.parametrize( - "value", [[], [bytes32([3] * 32)], [bytes32([0] * 32), bytes32([1] * 32)], [bytes([1]), bytes([1, 2, 3])]] + "value", [[], [bytes32([3] * 32)], [bytes32.zeros, bytes32([1] * 32)], [bytes([1]), bytes([1, 2, 3])]] ) def test_shatree_atom_list(value: list[bytes]) -> None: h1 = shatree_atom_list(value) diff --git a/chia/_tests/clvm/test_puzzle_compression.py b/chia/_tests/clvm/test_puzzle_compression.py index 78d7d8e94478..feafce603a18 100644 --- a/chia/_tests/clvm/test_puzzle_compression.py +++ b/chia/_tests/clvm/test_puzzle_compression.py @@ -24,9 +24,8 @@ lowest_best_version, ) -ZERO_32 = bytes32([0] * 32) ONE_32 = bytes32([17] * 32) -COIN = Coin(ZERO_32, ZERO_32, uint64(0)) +COIN = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) SOLUTION = Program.to([]) diff --git a/chia/_tests/clvm/test_spend_sim.py b/chia/_tests/clvm/test_spend_sim.py index 64f15b86689a..2896070972ec 100644 --- a/chia/_tests/clvm/test_spend_sim.py +++ b/chia/_tests/clvm/test_spend_sim.py @@ -41,7 +41,7 @@ async def test_all_endpoints(): async with sim_and_client() as (sim, sim_client): for i in range(0, 5): await sim.farm_block() - await sim.farm_block(bytes32([0] * 32)) + await sim.farm_block(bytes32.zeros) await sim.farm_block(bytes32([1] * 32)) for i in range(0, 5): await sim.farm_block() @@ -95,19 +95,19 @@ async def test_all_endpoints(): assert len(coin_records) == 0 # get_coin_records_by_puzzle_hash - coin_records = await sim_client.get_coin_records_by_puzzle_hash(bytes32([0] * 32)) + coin_records = await sim_client.get_coin_records_by_puzzle_hash(bytes32.zeros) coin_record_name = coin_records[0].coin.name() assert len(coin_records) == 2 - coin_records = await sim_client.get_coin_records_by_puzzle_hash(bytes32([0] * 32), start_height=0, end_height=2) + coin_records = await sim_client.get_coin_records_by_puzzle_hash(bytes32.zeros, start_height=0, end_height=2) assert len(coin_records) == 0 # get_coin_records_by_puzzle_hashes - coin_records = await sim_client.get_coin_records_by_puzzle_hashes([bytes32([0] * 32), bytes32([1] * 32)]) + coin_records = await sim_client.get_coin_records_by_puzzle_hashes([bytes32.zeros, bytes32([1] * 32)]) assert len(coin_records) == 4 coin_records = await sim_client.get_coin_records_by_puzzle_hashes( - [bytes32([0] * 32), bytes32([1] * 32)], start_height=0, end_height=2 + [bytes32.zeros, bytes32([1] * 32)], start_height=0, end_height=2 ) assert len(coin_records) == 0 diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index 3d49272ec157..ceca5f538753 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -113,7 +113,7 @@ async def combine_coins( wallet_id=uint32(1), number_of_coins=uint16(500), largest_first=True, - target_coin_ids=[bytes32([0] * 32)], + target_coin_ids=[bytes32.zeros], target_coin_amount=uint64(1_000_000_000_000), fee=uint64(500_000_000_000), 
push=False, diff --git a/chia/_tests/cmds/wallet/test_dao.py b/chia/_tests/cmds/wallet/test_dao.py index aa78b24a1c77..6f6292459401 100644 --- a/chia/_tests/cmds/wallet/test_dao.py +++ b/chia/_tests/cmds/wallet/test_dao.py @@ -317,7 +317,7 @@ async def dao_create_proposal( push: bool = True, timelock_info: ConditionValidTimes = ConditionValidTimes(), ) -> DAOCreateProposalResponse: - return DAOCreateProposalResponse([STD_UTX], [STD_TX], bytes32([0] * 32), STD_TX.name, STD_TX) + return DAOCreateProposalResponse([STD_UTX], [STD_TX], bytes32.zeros, STD_TX.name, STD_TX) async def get_wallets(self, wallet_type: Optional[WalletType] = None) -> list[dict[str, Union[str, int]]]: return [{"id": 1, "type": 0}, {"id": 2, "type": 14}] @@ -429,7 +429,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: "-m 0.1", "--reuse", ] - proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32([0] * 32).hex()}"] + proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32.zeros.hex()}"] run_cli_command_and_assert(capsys, root_dir, spend_args, proposal_asserts) bad_spend_args = [ @@ -447,7 +447,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: "-m 0.1", "--reuse", ] - proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32([0] * 32).hex()}"] + proposal_asserts = ["Successfully created proposal", f"Proposal ID: {bytes32.zeros.hex()}"] with pytest.raises(ValueError) as e_info: run_cli_command_and_assert(capsys, root_dir, bad_spend_args, proposal_asserts) assert e_info.value.args[0] == "Must include a json specification or an address / amount pair." diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index d5b12e593067..5d9f74bebaaf 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -129,7 +129,7 @@ async def mint_nft( [STD_TX], uint32(wallet_id), WalletSpendBundle([], G2Element()), - bytes32([0] * 32).hex(), + bytes32.zeros.hex(), ) inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index a475d40f3d0e..f8d1aacdbfac 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -47,7 +47,7 @@ async def vc_mint( LineageProof(None, None, None), VCLineageProof(None, None, None, None), bytes32([3] * 32), - bytes32([0] * 32), + bytes32.zeros, bytes32([1] * 32), None, ), diff --git a/chia/_tests/core/data_layer/test_data_layer.py b/chia/_tests/core/data_layer/test_data_layer.py index 71fa0786ef45..3a0eb6fbced0 100644 --- a/chia/_tests/core/data_layer/test_data_layer.py +++ b/chia/_tests/core/data_layer/test_data_layer.py @@ -75,8 +75,8 @@ async def wallet_rpc_init() -> WalletRpcClient: ) async with data_layer.manage(): - await data_layer.get_downloader(store_id=bytes32([0] * 32), url="") - await data_layer.get_uploaders(store_id=bytes32([0] * 32)) + await data_layer.get_downloader(store_id=bytes32.zeros, url="") + await data_layer.get_uploaders(store_id=bytes32.zeros) await data_layer.check_plugins() header_values = {request.headers.get(header_key) for request in recording_web_server.requests} diff --git a/chia/_tests/core/data_layer/test_data_layer_util.py b/chia/_tests/core/data_layer/test_data_layer_util.py index 8ef483dfec1d..3fd93a3f0f94 100644 --- a/chia/_tests/core/data_layer/test_data_layer_util.py +++ b/chia/_tests/core/data_layer/test_data_layer_util.py @@ -105,7 +105,7 
@@ class RoundTripCase: RoundTripCase( id="Root", instance=Root( - store_id=bytes32(b"\x00" * 32), + store_id=bytes32.zeros, node_hash=bytes32(b"\x01" * 32), generation=3, status=Status.PENDING, @@ -120,7 +120,7 @@ class RoundTripCase: instance=ClearPendingRootsResponse( success=True, root=Root( - store_id=bytes32(b"\x00" * 32), + store_id=bytes32.zeros, node_hash=bytes32(b"\x01" * 32), generation=3, status=Status.PENDING, diff --git a/chia/_tests/core/data_layer/test_data_rpc.py b/chia/_tests/core/data_layer/test_data_rpc.py index 329c65c99293..d637154a2bc8 100644 --- a/chia/_tests/core/data_layer/test_data_rpc.py +++ b/chia/_tests/core/data_layer/test_data_rpc.py @@ -320,7 +320,7 @@ async def test_create_insert_get( await data_rpc_api.get_value({"id": store_id.hex(), "key": key.hex()}) wallet_root = await data_rpc_api.get_root({"id": store_id.hex()}) local_root = await data_rpc_api.get_local_root({"id": store_id.hex()}) - assert wallet_root["hash"] == bytes32([0] * 32) + assert wallet_root["hash"] == bytes32.zeros assert local_root["hash"] is None # test empty changelist @@ -505,7 +505,7 @@ async def test_get_roots( await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) roots = await data_rpc_api.get_roots({"ids": [store_id1.hex(), store_id2.hex()]}) assert roots["root_hashes"][1]["id"] == store_id2 - assert roots["root_hashes"][1]["hash"] == bytes32([0] * 32) + assert roots["root_hashes"][1]["hash"] == bytes32.zeros assert roots["root_hashes"][1]["confirmed"] is True assert roots["root_hashes"][1]["timestamp"] > 0 key4 = b"d" @@ -520,7 +520,7 @@ async def test_get_roots( roots = await data_rpc_api.get_roots({"ids": [store_id1.hex(), store_id2.hex()]}) assert roots["root_hashes"][1]["id"] == store_id2 assert roots["root_hashes"][1]["hash"] is not None - assert roots["root_hashes"][1]["hash"] != bytes32([0] * 32) + assert roots["root_hashes"][1]["hash"] != bytes32.zeros assert roots["root_hashes"][1]["confirmed"] is True assert roots["root_hashes"][1]["timestamp"] > 0 @@ -552,10 +552,10 @@ async def test_get_root_history( await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) history1 = await data_rpc_api.get_root_history({"id": store_id1.hex()}) assert len(history1["root_history"]) == 2 - assert history1["root_history"][0]["root_hash"] == bytes32([0] * 32) + assert history1["root_history"][0]["root_hash"] == bytes32.zeros assert history1["root_history"][0]["confirmed"] is True assert history1["root_history"][0]["timestamp"] > 0 - assert history1["root_history"][1]["root_hash"] != bytes32([0] * 32) + assert history1["root_history"][1]["root_hash"] != bytes32.zeros assert history1["root_history"][1]["confirmed"] is True assert history1["root_history"][1]["timestamp"] > 0 key4 = b"d" @@ -569,7 +569,7 @@ async def test_get_root_history( await farm_block_with_spend(full_node_api, ph, update_tx_rec1, wallet_rpc_api) history2 = await data_rpc_api.get_root_history({"id": store_id1.hex()}) assert len(history2["root_history"]) == 3 - assert history2["root_history"][0]["root_hash"] == bytes32([0] * 32) + assert history2["root_history"][0]["root_hash"] == bytes32.zeros assert history2["root_history"][0]["confirmed"] is True assert history2["root_history"][0]["timestamp"] > 0 assert history2["root_history"][1]["root_hash"] == history1["root_history"][1]["root_hash"] @@ -608,7 +608,7 @@ async def test_get_kv_diff( diff_res = await data_rpc_api.get_kv_diff( { "id": store_id1.hex(), - "hash_1": bytes32([0] * 32).hex(), + "hash_1": bytes32.zeros.hex(), 
"hash_2": history["root_history"][1]["root_hash"].hex(), } ) @@ -688,7 +688,7 @@ async def test_batch_update_matches_single_operations( root_1 = await data_rpc_api.get_roots({"ids": [store_id.hex()]}) expected_res_hash = root_1["root_hashes"][0]["hash"] - assert expected_res_hash != bytes32([0] * 32) + assert expected_res_hash != bytes32.zeros changelist = [{"action": "delete", "key": key_2.hex()}] res = await data_rpc_api.batch_update({"id": store_id.hex(), "changelist": changelist}) @@ -702,7 +702,7 @@ async def test_batch_update_matches_single_operations( root_2 = await data_rpc_api.get_roots({"ids": [store_id.hex()]}) hash_2 = root_2["root_hashes"][0]["hash"] - assert hash_2 == bytes32([0] * 32) + assert hash_2 == bytes32.zeros changelist = [{"action": "insert", "key": key.hex(), "value": value.hex()}] changelist.append({"action": "insert", "key": key_2.hex(), "value": value_2.hex()}) @@ -867,7 +867,7 @@ async def offer_setup_fixture( StoreSetup( api=data_rpc_api, id=bytes32.from_hexstr(create_response["id"]), - original_hash=bytes32([0] * 32), + original_hash=bytes32.zeros, data_layer=data_layer, data_rpc_client=data_rpc_client, ) @@ -1650,13 +1650,13 @@ async def test_make_and_take_offer(offer_setup: OfferSetup, reference: MakeAndTa assert [generation["confirmed"] for generation in maker_history] == [True] * len(maker_history) assert [generation["root_hash"] for generation in maker_history] == [ - bytes32([0] * 32), + bytes32.zeros, *reference.maker_root_history, ] assert [generation["confirmed"] for generation in taker_history] == [True] * len(taker_history) assert [generation["root_hash"] for generation in taker_history] == [ - bytes32([0] * 32), + bytes32.zeros, *reference.taker_root_history, ] @@ -1753,7 +1753,7 @@ async def test_make_offer_failure_rolls_back_db(offer_setup: OfferSetup) -> None "inclusions": reference.maker_inclusions, }, { - "store_id": bytes32([0] * 32).hex(), + "store_id": bytes32.zeros.hex(), "inclusions": [], }, ], @@ -2531,7 +2531,7 @@ async def populate_proof_setup(offer_setup: OfferSetup, count: int) -> OfferSetu taker=StoreSetup( api=offer_setup.taker.api, id=offer_setup.taker.id, - original_hash=bytes32([0] * 32), + original_hash=bytes32.zeros, data_layer=offer_setup.taker.data_layer, data_rpc_client=offer_setup.taker.data_rpc_client, ), @@ -3053,7 +3053,7 @@ async def test_pagination_cmds( update_tx_rec0 = res["tx_id"] await farm_block_with_spend(full_node_api, ph, update_tx_rec0, wallet_rpc_api) local_root = await data_rpc_api.get_local_root({"id": store_id.hex()}) - hash_1 = bytes32([0] * 32) + hash_1 = bytes32.zeros hash_2 = local_root["hash"] # `InterfaceLayer.direct` is not tested here since test `test_pagination_rpcs` extensively use it. 
if layer == InterfaceLayer.funcs: diff --git a/chia/_tests/core/data_layer/test_data_store.py b/chia/_tests/core/data_layer/test_data_store.py index af0b5cd45b4a..79565c5684c1 100644 --- a/chia/_tests/core/data_layer/test_data_store.py +++ b/chia/_tests/core/data_layer/test_data_store.py @@ -102,7 +102,7 @@ async def test_create_creates_tables_and_columns( @pytest.mark.anyio async def test_create_tree_accepts_bytes32(raw_data_store: DataStore) -> None: - store_id = bytes32(b"\0" * 32) + store_id = bytes32.zeros await raw_data_store.create_tree(store_id=store_id) @@ -723,7 +723,7 @@ async def test_autoinsert_balances_gaps(data_store: DataStore, store_id: bytes32 if i == 0 or i > 10: insert_result = await data_store.autoinsert(key, value, store_id, status=Status.COMMITTED) else: - reference_node_hash = await data_store.get_terminal_node_for_seed(store_id, bytes32([0] * 32)) + reference_node_hash = await data_store.get_terminal_node_for_seed(store_id, bytes32.zeros) insert_result = await data_store.insert( key=key, value=value, @@ -1176,7 +1176,7 @@ async def test_kv_diff_2(data_store: DataStore, store_id: bytes32) -> None: reference_node_hash=None, side=None, ) - empty_hash = bytes32([0] * 32) + empty_hash = bytes32.zeros invalid_hash = bytes32([0] * 31 + [1]) diff_1 = await data_store.get_kv_diff(store_id, empty_hash, insert_result.node_hash) assert diff_1 == {DiffData(OperationType.INSERT, b"000", b"000")} @@ -1252,7 +1252,7 @@ async def test_subscribe_unsubscribe(data_store: DataStore, store_id: bytes32) - await data_store.unsubscribe(store_id) assert await data_store.get_subscriptions() == [] - store_id2 = bytes32([0] * 32) + store_id2 = bytes32.zeros await data_store.subscribe( Subscription( @@ -1650,7 +1650,7 @@ async def test_benchmark_batch_insert_speed_multiple_batches( @pytest.mark.anyio async def test_delete_store_data(raw_data_store: DataStore) -> None: - store_id = bytes32(b"\0" * 32) + store_id = bytes32.zeros store_id_2 = bytes32(b"\0" * 31 + b"\1") await raw_data_store.create_tree(store_id=store_id, status=Status.COMMITTED) await raw_data_store.create_tree(store_id=store_id_2, status=Status.COMMITTED) @@ -1865,7 +1865,7 @@ async def test_insert_from_delta_file( for generation in range(1, num_files + 2): root = await data_store.get_tree_root(store_id=store_id, generation=generation) await write_files_for_root(data_store, store_id, root, tmp_path_1, 0, False, group_files_by_store) - root_hashes.append(bytes32([0] * 32) if root.node_hash is None else root.node_hash) + root_hashes.append(bytes32.zeros if root.node_hash is None else root.node_hash) store_path = tmp_path_1.joinpath(f"{store_id}") if group_files_by_store else tmp_path_1 with os.scandir(store_path) as entries: filenames = {entry.name for entry in entries} @@ -2020,7 +2020,7 @@ async def test_insert_from_delta_file_correct_file_exists( for generation in range(1, num_files + 2): root = await data_store.get_tree_root(store_id=store_id, generation=generation) await write_files_for_root(data_store, store_id, root, tmp_path, 0, group_by_store=group_files_by_store) - root_hashes.append(bytes32([0] * 32) if root.node_hash is None else root.node_hash) + root_hashes.append(bytes32.zeros if root.node_hash is None else root.node_hash) store_path = tmp_path.joinpath(f"{store_id}") if group_files_by_store else tmp_path with os.scandir(store_path) as entries: filenames = {entry.name for entry in entries} @@ -2333,7 +2333,7 @@ async def test_get_nodes(data_store: DataStore, store_id: bytes32) -> None: nodes = await 
data_store.get_nodes([node.hash for node in expected_nodes]) assert nodes == expected_nodes - node_hash = bytes32([0] * 32) + node_hash = bytes32.zeros node_hash_2 = bytes32([0] * 31 + [1]) with pytest.raises(Exception, match=f"^Nodes not found for hashes: {node_hash.hex()}, {node_hash_2.hex()}"): await data_store.get_nodes([node_hash, node_hash_2] + [node.hash for node in expected_nodes]) diff --git a/chia/_tests/core/data_layer/test_data_store_schema.py b/chia/_tests/core/data_layer/test_data_store_schema.py index 72c6500344d0..6e6ea10eb928 100644 --- a/chia/_tests/core/data_layer/test_data_store_schema.py +++ b/chia/_tests/core/data_layer/test_data_store_schema.py @@ -212,7 +212,7 @@ async def test_root_generation_must_not_be_less_than_zero( ) -> None: example = await add_01234567_example(data_store=data_store, store_id=store_id) values = { - "tree_id": bytes32([0] * 32), + "tree_id": bytes32.zeros, "generation": generation, "node_hash": example.terminal_nodes[0], "status": Status.PENDING, @@ -233,7 +233,7 @@ async def test_root_generation_must_not_be_less_than_zero( async def test_root_generation_must_not_be_null(data_store: DataStore, store_id: bytes32) -> None: example = await add_01234567_example(data_store=data_store, store_id=store_id) values = { - "tree_id": bytes32([0] * 32), + "tree_id": bytes32.zeros, "generation": None, "node_hash": example.terminal_nodes[0], "status": Status.PENDING, @@ -252,7 +252,7 @@ async def test_root_generation_must_not_be_null(data_store: DataStore, store_id: @pytest.mark.anyio async def test_root_node_hash_must_reference(data_store: DataStore) -> None: - values = {"tree_id": bytes32([0] * 32), "generation": 0, "node_hash": bytes32([0] * 32), "status": Status.PENDING} + values = {"tree_id": bytes32.zeros, "generation": 0, "node_hash": bytes32.zeros, "status": Status.PENDING} async with data_store.db_wrapper.writer() as writer: with pytest.raises(sqlite3.IntegrityError, match=r"^FOREIGN KEY constraint failed$"): @@ -270,7 +270,7 @@ async def test_root_node_hash_must_reference(data_store: DataStore) -> None: async def test_root_status_must_be_valid(data_store: DataStore, store_id: bytes32, bad_status: int) -> None: example = await add_01234567_example(data_store=data_store, store_id=store_id) values = { - "tree_id": bytes32([0] * 32), + "tree_id": bytes32.zeros, "generation": 0, "node_hash": example.terminal_nodes[0], "status": bad_status, @@ -290,7 +290,7 @@ async def test_root_status_must_be_valid(data_store: DataStore, store_id: bytes3 @pytest.mark.anyio async def test_root_status_must_not_be_null(data_store: DataStore, store_id: bytes32) -> None: example = await add_01234567_example(data_store=data_store, store_id=store_id) - values = {"tree_id": bytes32([0] * 32), "generation": 0, "node_hash": example.terminal_nodes[0], "status": None} + values = {"tree_id": bytes32.zeros, "generation": 0, "node_hash": example.terminal_nodes[0], "status": None} async with data_store.db_wrapper.writer() as writer: with pytest.raises(sqlite3.IntegrityError, match=r"^NOT NULL constraint failed: root.status$"): @@ -334,7 +334,7 @@ async def test_ancestors_ancestor_must_be_32( INSERT INTO ancestors(hash, ancestor, tree_id, generation) VALUES(:hash, :ancestor, :tree_id, :generation) """, - {"hash": node_hash, "ancestor": bytes([0] * length), "tree_id": bytes32([0] * 32), "generation": 0}, + {"hash": node_hash, "ancestor": bytes([0] * length), "tree_id": bytes32.zeros, "generation": 0}, ) @@ -353,7 +353,7 @@ async def test_ancestors_store_id_must_be_32( INSERT INTO 
ancestors(hash, ancestor, tree_id, generation) VALUES(:hash, :ancestor, :tree_id, :generation) """, - {"hash": node_hash, "ancestor": bytes32([0] * 32), "tree_id": bytes([0] * length), "generation": 0}, + {"hash": node_hash, "ancestor": bytes32.zeros, "tree_id": bytes([0] * length), "generation": 0}, ) diff --git a/chia/_tests/core/full_node/test_block_height_map.py b/chia/_tests/core/full_node/test_block_height_map.py index a776d11af0b7..c076a53c5a9e 100644 --- a/chia/_tests/core/full_node/test_block_height_map.py +++ b/chia/_tests/core/full_node/test_block_height_map.py @@ -71,7 +71,7 @@ async def setup_chain( ) -> None: height = start_height peak_hash = gen_block_hash(height + chain_id * 65536) - parent_hash = bytes32([0] * 32) + parent_hash = bytes32.zeros while height < length: ses = None if ses_every is not None and height % ses_every == 0: diff --git a/chia/_tests/core/ssl/test_ssl.py b/chia/_tests/core/ssl/test_ssl.py index b956ed1b83ff..544d35e1a4a9 100644 --- a/chia/_tests/core/ssl/test_ssl.py +++ b/chia/_tests/core/ssl/test_ssl.py @@ -33,7 +33,7 @@ async def establish_connection(server: ChiaServer, self_hostname: str, ssl_conte True, server.received_message_callback, None, - bytes32(b"\x00" * 32), + bytes32.zeros, 100, 30, local_capabilities_for_handshake=default_capabilities[NodeType.FULL_NODE], diff --git a/chia/_tests/core/test_crawler.py b/chia/_tests/core/test_crawler.py index 2f90b5e08afd..7b3486a7a18e 100644 --- a/chia/_tests/core/test_crawler.py +++ b/chia/_tests/core/test_crawler.py @@ -58,7 +58,7 @@ def receiving_failed() -> bool: return "Non existing function: request_children" in caplog.text with caplog.at_level(logging.ERROR): - msg = make_msg(ProtocolMessageTypes.request_children, RequestChildren(bytes32(b"\0" * 32))) + msg = make_msg(ProtocolMessageTypes.request_children, RequestChildren(bytes32.zeros)) assert await connection.send_message(msg) await time_out_assert(10, receiving_failed) @@ -83,7 +83,7 @@ def peer_added() -> bool: msg = make_msg( ProtocolMessageTypes.new_peak, - NewPeak(bytes32(b"\0" * 32), uint32(2), uint128(1), uint32(1), bytes32(b"\1" * 32)), + NewPeak(bytes32.zeros, uint32(2), uint128(1), uint32(1), bytes32(b"\1" * 32)), ) assert await connection.send_message(msg) await time_out_assert(10, peer_added) diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index a6a512a0c89b..128a9f294594 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -711,7 +711,7 @@ async def test_coin_name_not_found_in_mempool(one_node, self_hostname): full_node_service.config, ) - empty_coin_name = bytes32([0] * 32) + empty_coin_name = bytes32.zeros mempool_item = await client.get_mempool_items_by_coin_name(empty_coin_name) assert mempool_item["success"] == True assert "mempool_items" in mempool_item diff --git a/chia/_tests/core/test_merkle_set.py b/chia/_tests/core/test_merkle_set.py index 6d2fae441b03..8ba092ace4b8 100644 --- a/chia/_tests/core/test_merkle_set.py +++ b/chia/_tests/core/test_merkle_set.py @@ -93,7 +93,7 @@ async def test_merkle_set_duplicate() -> None: async def test_merkle_set_0() -> None: merkle_set = MerkleSet([]) assert merkle_set.get_root() == bytes32(compute_merkle_set_root([])) - assert merkle_set.get_root() == bytes32([0] * 32) + assert merkle_set.get_root() == bytes32.zeros @pytest.mark.anyio @@ -155,7 +155,7 @@ async def test_merkle_set_4() -> None: @pytest.mark.anyio async def test_merkle_set_5() -> None: - BLANK = bytes32([0] * 32) + BLANK = 
bytes32.zeros a = bytes32([0x58] + [0] * 31) b = bytes32([0x23] + [0] * 31) @@ -202,7 +202,7 @@ async def test_merkle_set_5() -> None: @pytest.mark.anyio async def test_merkle_left_edge() -> None: - BLANK = bytes32([0] * 32) + BLANK = bytes32.zeros a = bytes32([0x80] + [0] * 31) b = bytes32([0] * 31 + [1]) c = bytes32([0] * 31 + [2]) @@ -241,7 +241,7 @@ async def test_merkle_left_edge() -> None: @pytest.mark.anyio async def test_merkle_right_edge() -> None: - BLANK = bytes32([0] * 32) + BLANK = bytes32.zeros a = bytes32([0x40] + [0] * 31) b = bytes32([0xFF] * 31 + [0xFF]) c = bytes32([0xFF] * 31 + [0xFE]) diff --git a/chia/_tests/core/util/test_block_cache.py b/chia/_tests/core/util/test_block_cache.py index 913395751b5a..e7ed476aacc5 100644 --- a/chia/_tests/core/util/test_block_cache.py +++ b/chia/_tests/core/util/test_block_cache.py @@ -26,7 +26,7 @@ def BR(height: int, header_hash: bytes32, prev_hash: bytes32) -> BlockRecord: @pytest.mark.anyio async def test_block_cache(seeded_random: random.Random) -> None: a = BlockCache({}) - prev = bytes32([0] * 32) + prev = bytes32.zeros hashes = [bytes32.random(seeded_random) for _ in range(10)] for i, hh in enumerate(hashes): a.add_block(BR(i + 1, hh, prev)) diff --git a/chia/_tests/core/util/test_jsonify.py b/chia/_tests/core/util/test_jsonify.py index e6920fcefbde..a17fb241b849 100644 --- a/chia/_tests/core/util/test_jsonify.py +++ b/chia/_tests/core/util/test_jsonify.py @@ -46,7 +46,7 @@ class PrimitivesTest(Streamable): "set optional", "foobar", b"\0\1", - bytes32([0] * 32), + bytes32.zeros, False, ) diff --git a/chia/_tests/core/util/test_streamable.py b/chia/_tests/core/util/test_streamable.py index 52d416ba6e1c..77761bde592f 100644 --- a/chia/_tests/core/util/test_streamable.py +++ b/chia/_tests/core/util/test_streamable.py @@ -493,7 +493,7 @@ class TestClass3(Streamable): def test_json(bt: BlockTools) -> None: - block = bt.create_genesis_block(test_constants, bytes32([0] * 32), uint64(0)) + block = bt.create_genesis_block(test_constants, bytes32.zeros, uint64(0)) dict_block = block.to_json_dict() assert FullBlock.from_json_dict(dict_block) == block diff --git a/chia/_tests/plot_sync/test_plot_sync.py b/chia/_tests/plot_sync/test_plot_sync.py index eec0f086553d..65df244e8ade 100644 --- a/chia/_tests/plot_sync/test_plot_sync.py +++ b/chia/_tests/plot_sync/test_plot_sync.py @@ -67,7 +67,7 @@ def create_mock_plot(info: MockPlotInfo) -> Plot: return Plot( info.prover.get_filename(), uint8(0), - bytes32(b"\x00" * 32), + bytes32.zeros, None, None, G1Element(), diff --git a/chia/_tests/pools/test_pool_rpc.py b/chia/_tests/pools/test_pool_rpc.py index f0e0e4bf9587..257acc246800 100644 --- a/chia/_tests/pools/test_pool_rpc.py +++ b/chia/_tests/pools/test_pool_rpc.py @@ -681,7 +681,7 @@ async def test_self_pooling_to_pooling(self, setup: Setup, fee: uint64, self_hos pytest.skip("need to fix this test for non-zero fees") full_node_api, wallet_node, our_ph, total_block_rewards, client = setup - pool_ph = bytes32([0] * 32) + pool_ph = bytes32.zeros assert wallet_node._wallet_state_manager is not None @@ -762,7 +762,7 @@ async def status_is_farming_to_pool(w_id: int) -> bool: async def test_leave_pool(self, setup: Setup, fee: uint64, self_hostname: str) -> None: """This tests self-pooling -> pooling -> escaping -> self pooling""" full_node_api, wallet_node, our_ph, total_block_rewards, client = setup - pool_ph = bytes32([0] * 32) + pool_ph = bytes32.zeros assert len(await client.get_wallets(WalletType.POOLING_WALLET)) == 0 @@ -861,8 +861,8 @@ async 
def status_is_self_pooling() -> bool: async def test_change_pools(self, setup: Setup, fee: uint64, self_hostname: str) -> None: """This tests Pool A -> escaping -> Pool B""" full_node_api, wallet_node, our_ph, total_block_rewards, client = setup - pool_a_ph = bytes32([0] * 32) - pool_b_ph = bytes32([0] * 32) + pool_a_ph = bytes32.zeros + pool_b_ph = bytes32.zeros WAIT_SECS = 200 assert len(await client.get_wallets(WalletType.POOLING_WALLET)) == 0 @@ -929,8 +929,8 @@ async def status_is_leaving() -> bool: async def test_change_pools_reorg(self, setup: Setup, fee: uint64, self_hostname: str) -> None: """This tests Pool A -> escaping -> reorg -> escaping -> Pool B""" full_node_api, wallet_node, our_ph, total_block_rewards, client = setup - pool_a_ph = bytes32([0] * 32) - pool_b_ph = bytes32([0] * 32) + pool_a_ph = bytes32.zeros + pool_b_ph = bytes32.zeros WAIT_SECS = 30 assert len(await client.get_wallets(WalletType.POOLING_WALLET)) == 0 diff --git a/chia/_tests/simulation/test_simulation.py b/chia/_tests/simulation/test_simulation.py index 5557e2465c75..45be72bae296 100644 --- a/chia/_tests/simulation/test_simulation.py +++ b/chia/_tests/simulation/test_simulation.py @@ -313,7 +313,7 @@ async def test_simulation_farm_blocks( peak = full_node_api.full_node.blockchain.get_peak() assert isinstance(peak, BlockRecord) start_time = peak.timestamp - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) peak = full_node_api.full_node.blockchain.get_peak() assert isinstance(peak, BlockRecord) end_time = peak.timestamp diff --git a/chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py b/chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py index 2fda2bfb6eff..67894a8e106f 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py @@ -19,7 +19,7 @@ def test_cat_outer_puzzle() -> None: ACS = Program.to(1) - tail = bytes32([0] * 32) + tail = bytes32.zeros cat_puzzle: Program = construct_cat_puzzle(CAT_MOD, tail, ACS) double_cat_puzzle: Program = construct_cat_puzzle(CAT_MOD, tail, cat_puzzle) uncurried_cat_puzzle = uncurry_puzzle(double_cat_puzzle) diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index 03a3c338f7db..7989fe6ac687 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -1170,14 +1170,14 @@ async def check_all_there() -> bool: async with cat_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=False ) as action_scope: - await cat_wallet.generate_signed_transaction([uint64(max_sent_amount - 1)], [bytes32([0] * 32)], action_scope) + await cat_wallet.generate_signed_transaction([uint64(max_sent_amount - 1)], [bytes32.zeros], action_scope) assert action_scope.side_effects.transactions[0].amount == uint64(max_sent_amount - 1) # 2) Generate transaction that is equal to limit async with cat_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=False ) as action_scope: - await cat_wallet.generate_signed_transaction([uint64(max_sent_amount)], [bytes32([0] * 32)], action_scope) + await cat_wallet.generate_signed_transaction([uint64(max_sent_amount)], [bytes32.zeros], action_scope) assert action_scope.side_effects.transactions[0].amount == uint64(max_sent_amount) # 3) Generate transaction that is greater than limit @@ -1185,9 
+1185,7 @@ async def check_all_there() -> bool: async with cat_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=False ) as action_scope: - await cat_wallet.generate_signed_transaction( - [uint64(max_sent_amount + 1)], [bytes32([0] * 32)], action_scope - ) + await cat_wallet.generate_signed_transaction([uint64(max_sent_amount + 1)], [bytes32.zeros], action_scope) @pytest.mark.limit_consensus_modes(allowed=[ConsensusMode.PLAIN], reason="irrelevant") @@ -1537,7 +1535,7 @@ async def test_cat_change_detection(wallet_environments: WalletTestFramework) -> 1, [ [51, inner_puzhash, cat_amount_1], - [51, bytes32([0] * 32), cat_amount_0 - cat_amount_1], + [51, bytes32.zeros, cat_amount_0 - cat_amount_1], ], ), None, @@ -1590,7 +1588,7 @@ def asset_id(i: int) -> bytes32: return bytes32([i] * 32) def coin_state(i: int) -> CoinState: - return CoinState(Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(i)), None, None) + return CoinState(Coin(bytes32.zeros, bytes32.zeros, uint64(i)), None, None) await interested_store.add_unacknowledged_coin_state(asset_id(0), coin_state(0), None) await interested_store.add_unacknowledged_coin_state(asset_id(1), coin_state(1), 100) diff --git a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py index c4648f791e9c..d4d423fa17e4 100644 --- a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +++ b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py @@ -252,7 +252,7 @@ async def test_complex_offer(cost_logger: CostLogger) -> None: # Test preventing TAIL from running during exchange blue_cat_puz = construct_cat_puzzle(CAT_MOD, str_to_tail_hash("blue"), OFFER_MOD) - random_hash = bytes32([0] * 32) + random_hash = bytes32.zeros blue_spend = make_spend( Coin(random_hash, blue_cat_puz.get_tree_hash(), uint64(0)), blue_cat_puz, diff --git a/chia/_tests/wallet/cat_wallet/test_trades.py b/chia/_tests/wallet/cat_wallet/test_trades.py index 6b38db26a031..c50971fe709f 100644 --- a/chia/_tests/wallet/cat_wallet/test_trades.py +++ b/chia/_tests/wallet/cat_wallet/test_trades.py @@ -1701,7 +1701,7 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N wallet_environments.tx_config, push=False ) as action_scope: await trade_manager_maker.cancel_pending_offers( - [trade_make.trade_id, bytes32([0] * 32)], action_scope, secure=False + [trade_make.trade_id, bytes32.zeros], action_scope, secure=False ) await time_out_assert(15, get_trade_and_status, TradeStatus.CANCELLED, trade_manager_maker, trade_make) @@ -1851,7 +1851,7 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N chia_and_cat_for_something: OfferSummary = { env_maker.wallet_aliases["xch"]: -5, env_maker.wallet_aliases["cat"]: -6, - bytes32([0] * 32): 1, # Doesn't matter + bytes32.zeros: 1, # Doesn't matter } # Now we're going to create the other way around for test coverage sake @@ -1861,7 +1861,7 @@ async def test_trade_cancellation(wallet_environments: WalletTestFramework) -> N success, trade_make, error = await trade_manager_maker.create_offer_for_ids( chia_and_cat_for_something, action_scope, - driver_dict={bytes32([0] * 32): PuzzleInfo({"type": AssetType.CAT.value, "tail": "0x" + bytes(32).hex()})}, + driver_dict={bytes32.zeros: PuzzleInfo({"type": AssetType.CAT.value, "tail": "0x" + bytes(32).hex()})}, ) assert error is None assert success is True diff --git a/chia/_tests/wallet/conftest.py b/chia/_tests/wallet/conftest.py index 3fbcce66e1da..3f2df050e49f 100644 --- 
a/chia/_tests/wallet/conftest.py +++ b/chia/_tests/wallet/conftest.py @@ -87,7 +87,7 @@ async def new_create(*args: Any, **kwargs: Any) -> Any: "chia.consensus.multiprocess_validation.validate_finished_header_block", lambda *_, **__: (uint64(1), None) ) monkeypatch.setattr( - "chia.consensus.multiprocess_validation.verify_and_get_quality_string", lambda *_, **__: bytes32([0] * 32) + "chia.consensus.multiprocess_validation.verify_and_get_quality_string", lambda *_, **__: bytes32.zeros ) monkeypatch.setattr("chia.consensus.block_record.BlockRecord.sp_total_iters", lambda *_: uint128(0)) monkeypatch.setattr("chia.consensus.block_record.BlockRecord.ip_sub_slot_total_iters", lambda *_: uint128(0)) diff --git a/chia/_tests/wallet/db_wallet/test_db_graftroot.py b/chia/_tests/wallet/db_wallet/test_db_graftroot.py index edd4f40dc3ca..9bbfa66d256a 100644 --- a/chia/_tests/wallet/db_wallet/test_db_graftroot.py +++ b/chia/_tests/wallet/db_wallet/test_db_graftroot.py @@ -43,7 +43,7 @@ async def test_graftroot(cost_logger: CostLogger) -> None: all_values: list[bytes32] = [bytes32([x] * 32) for x in range(0, 100)] root, proofs = build_merkle_tree(all_values) p2_conditions = Program.to((1, [[51, ACS_PH, 0]])) # An coin to create to make sure this hits the blockchain - desired_key_values = ((bytes32([0] * 32), bytes32([1] * 32)), (bytes32([7] * 32), bytes32([8] * 32))) + desired_key_values = ((bytes32.zeros, bytes32([1] * 32)), (bytes32([7] * 32), bytes32([8] * 32))) desired_row_hashes: list[bytes32] = [build_merkle_tree_from_binary_tree(kv)[0] for kv in desired_key_values] fake_struct: Program = Program.to((ACS_PH, NIL_PH)) graftroot_puzzle: Program = GRAFTROOT_MOD.curry( @@ -129,7 +129,7 @@ def filter_none(values: list[bytes32]) -> list[bytes32]: # try with a bad merkle root announcement new_fake_spend = make_spend( fake_coin, - ACS.curry(fake_struct, ACS.curry(ACS_PH, (bytes32([0] * 32), None), None, None)), + ACS.curry(fake_struct, ACS.curry(ACS_PH, (bytes32.zeros, None), None, None)), Program.to([[[62, "$"]]]), ) new_final_bundle = WalletSpendBundle([new_fake_spend, graftroot_spend], G2Element()) diff --git a/chia/_tests/wallet/db_wallet/test_dl_wallet.py b/chia/_tests/wallet/db_wallet/test_dl_wallet.py index d8df878219ce..377c4e2178b6 100644 --- a/chia/_tests/wallet/db_wallet/test_dl_wallet.py +++ b/chia/_tests/wallet/db_wallet/test_dl_wallet.py @@ -426,7 +426,7 @@ async def is_singleton_confirmed(wallet: DataLayerWallet, lid: bytes32) -> bool: assert current_record != record_1 async with dl_wallet_0.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await dl_wallet_0.create_update_state_spend( - launcher_id, bytes32([0] * 32), action_scope, fee=uint64(2000000000000) + launcher_id, bytes32.zeros, action_scope, fee=uint64(2000000000000) ) update_txs = action_scope.side_effects.transactions record_0 = await dl_wallet_0.get_latest_singleton(launcher_id) @@ -569,23 +569,23 @@ async def test_mirrors(wallets_prefarm: Any, trusted: bool) -> None: dl_wallet_2 = await DataLayerWallet.create_new_dl_wallet(wsm_2) async with dl_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - launcher_id_1 = await dl_wallet_1.generate_new_reporter(bytes32([0] * 32), action_scope) + launcher_id_1 = await dl_wallet_1.generate_new_reporter(bytes32.zeros, action_scope) assert await dl_wallet_1.get_latest_singleton(launcher_id_1) is not None await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await 
time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_1, launcher_id_1, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_1, launcher_id_1, bytes32.zeros) async with dl_wallet_2.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - launcher_id_2 = await dl_wallet_2.generate_new_reporter(bytes32([0] * 32), action_scope) + launcher_id_2 = await dl_wallet_2.generate_new_reporter(bytes32.zeros, action_scope) assert await dl_wallet_2.get_latest_singleton(launcher_id_2) is not None await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) - await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_2, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_2, bytes32.zeros) peer_1 = wallet_node_1.get_full_node_peer() await dl_wallet_1.track_new_launcher_id(launcher_id_2, peer_1) peer_2 = wallet_node_2.get_full_node_peer() await dl_wallet_2.track_new_launcher_id(launcher_id_1, peer_2) - await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_1, launcher_id_2, bytes32([0] * 32)) - await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_1, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_1, launcher_id_2, bytes32.zeros) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet_2, launcher_id_1, bytes32.zeros) async with dl_wallet_1.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await dl_wallet_1.create_new_mirror( @@ -648,7 +648,7 @@ async def test_datalayer_reorgs(wallet_environments: WalletTestFramework) -> Non dl_wallet = await DataLayerWallet.create_new_dl_wallet(env.wallet_state_manager) async with dl_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - launcher_id = await dl_wallet.generate_new_reporter(bytes32([0] * 32), action_scope) + launcher_id = await dl_wallet.generate_new_reporter(bytes32.zeros, action_scope) await wallet_environments.process_pending_states( [ @@ -676,16 +676,16 @@ async def test_datalayer_reorgs(wallet_environments: WalletTestFramework) -> Non ) ] ) - await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet, launcher_id, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet, launcher_id, bytes32.zeros) height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=5) - await time_out_assert(15, is_singleton_confirmed_and_root, False, dl_wallet, launcher_id, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, False, dl_wallet, launcher_id, bytes32.zeros) await wallet_environments.process_pending_states( [ @@ -713,7 +713,7 @@ async def test_datalayer_reorgs(wallet_environments: WalletTestFramework) -> Non ) ] ) - await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet, launcher_id, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, True, dl_wallet, launcher_id, bytes32.zeros) async with 
dl_wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await dl_wallet.create_update_state_spend(launcher_id, bytes32([2] * 32), action_scope) @@ -737,11 +737,11 @@ async def test_datalayer_reorgs(wallet_environments: WalletTestFramework) -> Non height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=5) - await time_out_assert(15, is_singleton_confirmed_and_root, False, dl_wallet, launcher_id, bytes32([0] * 32)) + await time_out_assert(15, is_singleton_confirmed_and_root, False, dl_wallet, launcher_id, bytes32.zeros) await wallet_environments.process_pending_states( [ @@ -790,7 +790,7 @@ async def test_datalayer_reorgs(wallet_environments: WalletTestFramework) -> Non height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=5) diff --git a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py index 656d8f78c412..5dd761983a41 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_1_offers.py @@ -412,7 +412,7 @@ async def test_nft_offer_sell_did_to_did( 20, full_node_api.full_node.mempool_manager.get_spendbundle, tx.spend_bundle.name() ) - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await full_node_api.wait_for_wallets_synced(wallet_nodes=[wallet_node_maker, wallet_node_taker], timeout=20) await time_out_assert(20, get_nft_count, 1, nft_wallet_maker) @@ -1084,7 +1084,7 @@ async def get_trade_and_status(trade_manager: Any, trade: Any) -> TradeStatus: await full_node_api.process_transaction_records(records=action_scope.side_effects.transactions) for i in range(1, num_blocks): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await time_out_assert(15, get_trade_and_status, TradeStatus.CANCELLED, trade_manager_maker, trade_make) diff --git a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py index e8f8d71f5f34..90ccdd91532f 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py @@ -139,7 +139,7 @@ async def test_state_layer(cost_logger: CostLogger, metadata_updater: str) -> No @pytest.mark.anyio async def test_ownership_layer(cost_logger: CostLogger) -> None: async with sim_and_client() as (sim, sim_client): - TARGET_OWNER = bytes32([0] * 32) + TARGET_OWNER = bytes32.zeros TARGET_TP = Program.to([8]) # (x) # (a (i 11 (q 4 19 (c 43 (q ()))) (q 8)) 1) or # (mod (_ _ solution) (if solution (list (f solution) (f (r solution)) ()) (x))) @@ -191,7 +191,7 @@ async def test_ownership_layer(cost_logger: CostLogger) -> None: [ [51, ACS_PH, 1], [-10, TARGET_OWNER, 
TARGET_TP], - [62, b"\xad\x4c" + bytes32([0] * 32)], + [62, b"\xad\x4c" + bytes32.zeros], ] ] ), @@ -251,7 +251,7 @@ async def test_default_transfer_program(cost_logger: CostLogger) -> None: # Now make the ownership coin FAKE_SINGLETON_MOD = Program.to([2, 5, 11]) # (a 5 11) | (mod (_ INNER_PUZ inner_sol) (a INNER_PUZ inner_sol)) FAKE_CAT_MOD = Program.to([2, 11, 23]) # (a 11 23) or (mod (_ _ INNER_PUZ inner_sol) (a INNER_PUZ inner_sol)) - FAKE_LAUNCHER_ID = bytes32([0] * 32) + FAKE_LAUNCHER_ID = bytes32.zeros FAKE_TAIL = bytes32([2] * 32) FAKE_SINGLETON_STRUCT = Program.to((FAKE_SINGLETON_MOD.get_tree_hash(), (FAKE_LAUNCHER_ID, FAKE_LAUNCHER_ID))) FAKE_SINGLETON = FAKE_SINGLETON_MOD.curry(FAKE_SINGLETON_STRUCT, ACS) diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index 0ac39f547e61..f5a5dcd841db 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -298,7 +298,7 @@ async def test_nft_wallet_creation_and_transfer(wallet_environments: WalletTestF height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await time_out_assert(60, full_node_api.full_node.blockchain.get_peak_height, height + 1) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0) @@ -477,7 +477,7 @@ async def test_nft_wallet_creation_and_transfer(wallet_environments: WalletTestF height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 2), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 2), bytes32.zeros, None) ) await full_node_api.wait_for_self_synced() diff --git a/chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py b/chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py index 067a0b5cdad1..246ff273ee24 100644 --- a/chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py +++ b/chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py @@ -17,7 +17,7 @@ def test_ownership_outer_puzzle() -> None: ACS = Program.to(1) NIL = Program.to([]) - owner = bytes32([0] * 32) + owner = bytes32.zeros # (mod (current_owner conditions solution) # (list current_owner () conditions) # ) diff --git a/chia/_tests/wallet/rpc/test_dl_wallet_rpc.py b/chia/_tests/wallet/rpc/test_dl_wallet_rpc.py index 3da61a9114c1..901f261b4151 100644 --- a/chia/_tests/wallet/rpc/test_dl_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_dl_wallet_rpc.py @@ -77,11 +77,11 @@ async def test_wallet_make_transaction( await validate_get_routes(client_2, wallet_services[1].rpc_server.rpc_api) try: - merkle_root: bytes32 = bytes32([0] * 32) + merkle_root: bytes32 = bytes32.zeros txs, launcher_id = await client.create_new_dl(merkle_root, uint64(50)) for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) async def is_singleton_confirmed(rpc_client: WalletRpcClient, lid: bytes32) -> bool: @@ -99,7 +99,7 @@ async def is_singleton_confirmed(rpc_client: WalletRpcClient, lid: bytes32) -> b await client.dl_update_root(launcher_id, new_root, uint64(100)) 
for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) new_singleton_record = await client.dl_latest_singleton(launcher_id) @@ -163,7 +163,7 @@ async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, gen txs, launcher_id_3 = await client.create_new_dl(merkle_root, uint64(50)) for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id_2) @@ -180,7 +180,7 @@ async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, gen ) for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) await time_out_assert(15, is_singleton_confirmed, True, client, launcher_id) @@ -204,7 +204,7 @@ async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, gen height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) additions = [] for tx in txs: @@ -222,7 +222,7 @@ async def is_singleton_generation(rpc_client: WalletRpcClient, lid: bytes32, gen await time_out_assert(15, client.dl_get_mirrors, [mirror], launcher_id) await client.dl_delete_mirror(mirror_coin.name(), fee=uint64(2000000000000)) for i in range(0, 5): - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await asyncio.sleep(0.5) await time_out_assert(15, client.dl_get_mirrors, [], launcher_id) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index 06a4aaa589c2..db4c17f9eec1 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -329,7 +329,7 @@ async def test_send_transaction(wallet_rpc_environment: WalletRpcTestEnvironment # Tests sending a basic transaction extra_conditions = (Remark(Program.to(("test", None))),) - non_existent_coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + non_existent_coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) tx_no_push = ( await client.send_transaction( 1, @@ -495,7 +495,7 @@ async def test_get_farmed_amount_with_fee(wallet_rpc_environment: WalletRpcTestE async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await wallet.generate_signed_transaction( amount=uint64(5), - puzzle_hash=bytes32([0] * 32), + puzzle_hash=bytes32.zeros, action_scope=action_scope, fee=uint64(fee_amount), ) @@ -1073,7 +1073,7 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): wid, name = result assert wid == cat_0_id assert name == "My cat" - result = await client.cat_asset_id_to_name(bytes32([0] * 32)) + result = await client.cat_asset_id_to_name(bytes32.zeros) assert result is None verified_asset_id = next(iter(DEFAULT_CATS.items()))[1]["asset_id"] result = await 
client.cat_asset_id_to_name(bytes32.from_hexstr(verified_asset_id)) @@ -1124,7 +1124,7 @@ async def test_cat_endpoints(wallet_rpc_environment: WalletRpcTestEnvironment): cat_0_id, DEFAULT_TX_CONFIG.override( excluded_coin_amounts=[uint64(20)], - excluded_coin_ids=[bytes32([0] * 32)], + excluded_coin_ids=[bytes32.zeros], ), uint64(4), addr_1, @@ -1417,7 +1417,7 @@ def only_ids(trades): driver_dict=driver_dict, ) assert len([o for o in await wallet_1_rpc.get_all_offers() if o.status == TradeStatus.PENDING_ACCEPT.value]) == 1 - await wallet_1_rpc.cancel_offers(DEFAULT_TX_CONFIG, asset_id=bytes32([0] * 32)) + await wallet_1_rpc.cancel_offers(DEFAULT_TX_CONFIG, asset_id=bytes32.zeros) assert len([o for o in await wallet_1_rpc.get_all_offers() if o.status == TradeStatus.PENDING_ACCEPT.value]) == 1 await wallet_1_rpc.cancel_offers(DEFAULT_TX_CONFIG, asset_id=cat_asset_id) assert len([o for o in await wallet_1_rpc.get_all_offers() if o.status == TradeStatus.PENDING_ACCEPT.value]) == 0 @@ -2692,7 +2692,7 @@ async def test_split_coins(wallet_environments: WalletTestFramework) -> None: with pytest.raises(ResponseFailureError, match="Could not find coin with ID 00000000000000000"): await env.rpc_client.split_coins( - dataclasses.replace(xch_request, target_coin_id=bytes32([0] * 32)), + dataclasses.replace(xch_request, target_coin_id=bytes32.zeros), wallet_environments.tx_config, ) @@ -2878,7 +2878,7 @@ async def test_combine_coins(wallet_environments: WalletTestFramework) -> None: with pytest.raises(ResponseFailureError, match="More coin IDs specified than desired number of coins to combine"): await env.rpc_client.combine_coins( - dataclasses.replace(xch_combine_request, target_coin_ids=[bytes32([0] * 32)] * 100), + dataclasses.replace(xch_combine_request, target_coin_ids=[bytes32.zeros] * 100), wallet_environments.tx_config, ) diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 5f31a3868d9e..7fc03d64eedf 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -1244,7 +1244,7 @@ async def test_retry_store( full_node_api = full_nodes[0] full_node_server = full_node_api.full_node.server - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) # Trusted node sync wallets[0][0].config["trusted_peers"] = {full_node_server.node_id.hex(): full_node_server.node_id.hex()} @@ -1285,7 +1285,7 @@ async def new_func(request: wallet_protocol.RequestPuzzleSolution) -> Optional[M if not request_puzzle_solution_failure_tested: request_puzzle_solution_failure_tested = True # This can just return None if we have `none_response` enabled. 
- reject = wallet_protocol.RejectPuzzleSolution(bytes32([0] * 32), uint32(0)) + reject = wallet_protocol.RejectPuzzleSolution(bytes32.zeros, uint32(0)) return make_msg(ProtocolMessageTypes.reject_puzzle_solution, reject) else: return await func(request) @@ -1362,7 +1362,7 @@ async def new_func(puzzle_hash: bytes32) -> Optional[WalletIdentifier]: wallet = wallet_node.wallet_state_manager.main_wallet ph = await wallet.get_new_puzzlehash() await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(ph)) - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) async def retry_store_empty() -> bool: return len(await wallet_node.wallet_state_manager.retry_store.get_all_states_to_retry()) == 0 @@ -1377,7 +1377,7 @@ async def assert_coin_state_retry() -> None: async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await wallet.generate_signed_transaction( - uint64(1_000_000_000_000), bytes32([0] * 32), action_scope, memos=[ph] + uint64(1_000_000_000_000), bytes32.zeros, action_scope, memos=[ph] ) [tx] = action_scope.side_effects.transactions await time_out_assert(30, wallet.get_confirmed_balance, 2_000_000_000_000) @@ -1386,7 +1386,7 @@ async def tx_in_mempool() -> bool: return full_node_api.full_node.mempool_manager.get_spendbundle(tx.name) is not None await time_out_assert(15, tx_in_mempool) - await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32([0] * 32))) + await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(bytes32.zeros)) await assert_coin_state_retry() diff --git a/chia/_tests/wallet/test_clvm_streamable.py b/chia/_tests/wallet/test_clvm_streamable.py index 23eb592ba786..d4740ae3a9df 100644 --- a/chia/_tests/wallet/test_clvm_streamable.py +++ b/chia/_tests/wallet/test_clvm_streamable.py @@ -190,7 +190,7 @@ def test_translation_layer() -> None: ] ) - coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) spend = Spend( coin, Program.to("puzzle"), diff --git a/chia/_tests/wallet/test_conditions.py b/chia/_tests/wallet/test_conditions.py index 1f17657c6781..53159c637dfc 100644 --- a/chia/_tests/wallet/test_conditions.py +++ b/chia/_tests/wallet/test_conditions.py @@ -75,7 +75,7 @@ def program(self) -> Program: return prog -HASH: bytes32 = bytes32([0] * 32) +HASH: bytes32 = bytes32.zeros HASH_HEX: str = HASH.hex() PK: bytes = b"\xc0" + bytes(47) PK_HEX: str = PK.hex() diff --git a/chia/_tests/wallet/test_debug_spend_bundle.py b/chia/_tests/wallet/test_debug_spend_bundle.py index b285aed45d27..2bdc31207ca4 100644 --- a/chia/_tests/wallet/test_debug_spend_bundle.py +++ b/chia/_tests/wallet/test_debug_spend_bundle.py @@ -23,22 +23,22 @@ def test_debug_spend_bundle() -> None: sig = AugSchemeMPL.sign(sk, msg) ACS = Program.to(15).curry(Program.to("hey").curry("now")).curry("brown", "cow") ACS_PH = ACS.get_tree_hash() - coin: Coin = Coin(bytes32([0] * 32), ACS_PH, uint64(3)) + coin: Coin = Coin(bytes32.zeros, ACS_PH, uint64(3)) child_coin: Coin = Coin(coin.name(), ACS_PH, uint64(0)) - coin_bad_reveal: Coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin_bad_reveal: Coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) solution = Program.to( [ [ConditionOpcode.AGG_SIG_UNSAFE, pk, msg], [ConditionOpcode.REMARK], [ConditionOpcode.CREATE_COIN, ACS_PH, 0], - [ConditionOpcode.CREATE_COIN, bytes32([0] * 32), 1], - 
[ConditionOpcode.CREATE_COIN, bytes32([0] * 32), 2, [b"memo", b"memo", b"memo"]], + [ConditionOpcode.CREATE_COIN, bytes32.zeros, 1], + [ConditionOpcode.CREATE_COIN, bytes32.zeros, 2, [b"memo", b"memo", b"memo"]], [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, None], - [ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, bytes32([0] * 32)], + [ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, bytes32.zeros], [ConditionOpcode.ASSERT_COIN_ANNOUNCEMENT, std_hash(coin.name())], [ConditionOpcode.CREATE_COIN_ANNOUNCEMENT, b"hey"], [ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, None], - [ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, bytes32([0] * 32)], + [ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, bytes32.zeros], [ConditionOpcode.ASSERT_PUZZLE_ANNOUNCEMENT, std_hash(coin.puzzle_hash)], [ConditionOpcode.CREATE_PUZZLE_ANNOUNCEMENT, b"hey"], ] diff --git a/chia/_tests/wallet/test_new_wallet_protocol.py b/chia/_tests/wallet/test_new_wallet_protocol.py index 909279ae20e2..e039670747e1 100644 --- a/chia/_tests/wallet/test_new_wallet_protocol.py +++ b/chia/_tests/wallet/test_new_wallet_protocol.py @@ -263,7 +263,7 @@ async def test_request_coin_state(one_node: OneNode, self_hostname: str) -> None # Add coin records coin_records = [ CoinRecord( - coin=Coin(bytes32(b"\0" * 32), bytes32(b"\0" * 32), uint64(i)), + coin=Coin(bytes32.zeros, bytes32.zeros, uint64(i)), confirmed_block_index=uint32(1), spent_block_index=uint32(1 if i % 2 == 0 else 0), coinbase=False, @@ -340,7 +340,7 @@ async def test_request_coin_state_reorg(one_node: OneNode, self_hostname: str) - # Reorg await simulator.reorg_from_index_to_new_index( - simulator_protocol.ReorgProtocol(uint32(3), uint32(10), bytes32(b"\1" * 32), bytes32(b"\0" * 32)) + simulator_protocol.ReorgProtocol(uint32(3), uint32(10), bytes32(b"\1" * 32), bytes32.zeros) ) # Request coin state, should reject due to reorg @@ -425,7 +425,7 @@ async def test_request_puzzle_state(one_node: OneNode, self_hostname: str) -> No for i in range(5): coin_records.append( CoinRecord( - coin=Coin(bytes32(b"\0" * 32), puzzle_hash, uint64(i)), + coin=Coin(bytes32.zeros, puzzle_hash, uint64(i)), confirmed_block_index=uint32(1), spent_block_index=uint32(1 if i % 2 == 0 else 0), coinbase=False, @@ -525,7 +525,7 @@ async def test_request_puzzle_state_reorg(one_node: OneNode, self_hostname: str) # Reorg await simulator.reorg_from_index_to_new_index( - simulator_protocol.ReorgProtocol(uint32(3), uint32(10), bytes32(b"\1" * 32), bytes32(b"\0" * 32)) + simulator_protocol.ReorgProtocol(uint32(3), uint32(10), bytes32(b"\1" * 32), bytes32.zeros) ) # Request coin state, should reject due to reorg @@ -711,7 +711,7 @@ async def test_sync_puzzle_state( if rng.choice([True, False, False, False, False]): coin_ph = std_hash(coin_ph) - coin = Coin(bytes32(b"\0" * 32), coin_ph, uint64(base_amount + added_amount)) + coin = Coin(bytes32.zeros, coin_ph, uint64(base_amount + added_amount)) coin_records[coin.name()] = CoinRecord( coin=coin, @@ -847,8 +847,8 @@ async def raw_mpu_setup(one_node: OneNode, self_hostname: str, no_capability: bo async def make_coin(full_node: FullNode) -> tuple[Coin, bytes32]: ph = IDENTITY_PUZZLE_HASH - coin = Coin(bytes32(b"\0" * 32), ph, uint64(1000)) - hint = bytes32(b"\0" * 32) + coin = Coin(bytes32.zeros, ph, uint64(1000)) + hint = bytes32.zeros height = full_node.blockchain.get_peak_height() assert height is not None diff --git a/chia/_tests/wallet/test_notifications.py b/chia/_tests/wallet/test_notifications.py index 13d0ca599ade..d0647cb86fd0 100644 --- a/chia/_tests/wallet/test_notifications.py +++ 
b/chia/_tests/wallet/test_notifications.py @@ -36,7 +36,7 @@ async def test_notification_store_backwards_compat() -> None: cursor = await conn.execute( "INSERT OR REPLACE INTO notifications (coin_id, msg, amount) VALUES(?, ?, ?)", ( - bytes32([0] * 32), + bytes32.zeros, bytes([0] * 10), bytes([0]), ), diff --git a/chia/_tests/wallet/test_sign_coin_spends.py b/chia/_tests/wallet/test_sign_coin_spends.py index 5b60e3e79487..de4905fa42db 100644 --- a/chia/_tests/wallet/test_sign_coin_spends.py +++ b/chia/_tests/wallet/test_sign_coin_spends.py @@ -45,7 +45,7 @@ additional_data: bytes32 = bytes32(DEFAULT_CONSTANTS.AGG_SIG_ME_ADDITIONAL_DATA) -coin: Coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) +coin: Coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) puzzle = SerializedProgram.from_bytes(b"\x01") solution_h = SerializedProgram.from_program( Program.to([[ConditionOpcode.AGG_SIG_UNSAFE, pk1_h, msg1], [ConditionOpcode.AGG_SIG_ME, pk2_h_synth, msg2]]) @@ -90,7 +90,7 @@ async def test_wsm_sign_transaction() -> None: [ DerivationRecord( uint32(1), - bytes32([0] * 32), + bytes32.zeros, pk1_h, WalletType.STANDARD_WALLET, uint32(1), @@ -129,7 +129,7 @@ async def test_wsm_sign_transaction() -> None: [ DerivationRecord( uint32(1), - bytes32([0] * 32), + bytes32.zeros, pk1_u, WalletType.STANDARD_WALLET, uint32(1), diff --git a/chia/_tests/wallet/test_signer_protocol.py b/chia/_tests/wallet/test_signer_protocol.py index 8fd753bac9b0..ceacc02ade73 100644 --- a/chia/_tests/wallet/test_signer_protocol.py +++ b/chia/_tests/wallet/test_signer_protocol.py @@ -87,7 +87,7 @@ def test_unsigned_transaction_type() -> None: pubkey: G1Element = G1Element() message: bytes = b"message" - coin: ConsensusCoin = ConsensusCoin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin: ConsensusCoin = ConsensusCoin(bytes32.zeros, bytes32.zeros, uint64(0)) puzzle: Program = Program.to(1) solution: Program = Program.to([AggSigMe(pubkey, message).to_program()]) @@ -194,7 +194,7 @@ async def test_p2dohp_wallet_signer_protocol(wallet_environments: WalletTestFram not_our_pubkey: G1Element = not_our_private_key.get_g1() not_our_message: bytes = b"not our message" not_our_coin: ConsensusCoin = ConsensusCoin( - bytes32([0] * 32), + bytes32.zeros, ACS_PH, uint64(0), ) @@ -514,7 +514,7 @@ def test_blind_signer_translation_layer() -> None: PathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), ] signing_targets: list[SigningTarget] = [ - SigningTarget(b"pubkey", b"message", bytes32([0] * 32)), + SigningTarget(b"pubkey", b"message", bytes32.zeros), SigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), ] @@ -540,7 +540,7 @@ def test_blind_signer_translation_layer() -> None: BSTLPathHint(b"root2", [uint64(4), uint64(5), uint64(6)]), ] bstl_signing_targets: list[BSTLSigningTarget] = [ - BSTLSigningTarget(b"pubkey", b"message", bytes32([0] * 32)), + BSTLSigningTarget(b"pubkey", b"message", bytes32.zeros), BSTLSigningTarget(b"pubkey2", b"message2", bytes32([1] * 32)), ] @@ -616,7 +616,7 @@ async def test_signer_commands(wallet_environments: WalletTestFramework) -> None AMOUNT = uint64(1) async with wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, sign=False, push=False) as action_scope: - await wallet.generate_signed_transaction(AMOUNT, bytes32([0] * 32), action_scope) + await wallet.generate_signed_transaction(AMOUNT, bytes32.zeros, action_scope) [tx] = action_scope.side_effects.transactions runner = CliRunner() @@ -797,8 +797,8 @@ def from_wallet_api(_from: Coin) -> FooCoin: @staticmethod def to_wallet_api(_from: 
FooCoin) -> Coin: return Coin( - bytes32([0] * 32), - bytes32([0] * 32), + bytes32.zeros, + bytes32.zeros, _from.amount, ) @@ -822,7 +822,7 @@ def test_signer_protocol_in(monkeypatch: pytest.MonkeyPatch) -> None: def cmd() -> None: pass - coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(13)) + coin = Coin(bytes32.zeros, bytes32.zeros, uint64(13)) @chia_command(cmd, "temp_cmd", "blah") class TempCMD(SPIn): @@ -878,7 +878,7 @@ def test_signer_protocol_out(monkeypatch: pytest.MonkeyPatch) -> None: def cmd() -> None: pass - coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) coin_bytes = byte_serialize_clvm_streamable(coin) @chia_command(cmd, "temp_cmd", "blah") diff --git a/chia/_tests/wallet/test_singleton_lifecycle_fast.py b/chia/_tests/wallet/test_singleton_lifecycle_fast.py index 7eb710b22b9f..09e2318508c1 100644 --- a/chia/_tests/wallet/test_singleton_lifecycle_fast.py +++ b/chia/_tests/wallet/test_singleton_lifecycle_fast.py @@ -508,7 +508,7 @@ def test_lifecycle_with_coinstore_as_wallet() -> None: now = CoinTimestamp(10012300, uint32(1)) DELAY_SECONDS = 86400 - DELAY_PUZZLE_HASH = bytes32([0] * 32) + DELAY_PUZZLE_HASH = bytes32.zeros ####### # spend coin to a singleton diff --git a/chia/_tests/wallet/test_transaction_store.py b/chia/_tests/wallet/test_transaction_store.py index 6b20b2a7adc0..13378db63305 100644 --- a/chia/_tests/wallet/test_transaction_store.py +++ b/chia/_tests/wallet/test_transaction_store.py @@ -842,7 +842,7 @@ async def test_valid_times_migration() -> None: old_record = TransactionRecordOld( confirmed_at_height=uint32(0), created_at_time=uint64(1000000000), - to_puzzle_hash=bytes32([0] * 32), + to_puzzle_hash=bytes32.zeros, amount=uint64(0), fee_amount=uint64(0), confirmed=False, @@ -854,7 +854,7 @@ async def test_valid_times_migration() -> None: sent_to=[], trade_id=None, type=uint32(TransactionType.INCOMING_TX.value), - name=bytes32([0] * 32), + name=bytes32.zeros, memos=[], ) @@ -893,7 +893,7 @@ async def test_large_tx_record_query() -> None: record = TransactionRecordOld( confirmed_at_height=uint32(0), created_at_time=uint64(1000000000), - to_puzzle_hash=bytes32([0] * 32), + to_puzzle_hash=bytes32.zeros, amount=uint64(0), fee_amount=uint64(0), confirmed=False, diff --git a/chia/_tests/wallet/test_util.py b/chia/_tests/wallet/test_util.py index 5ea8aa4bbe8e..5e56f425125c 100644 --- a/chia/_tests/wallet/test_util.py +++ b/chia/_tests/wallet/test_util.py @@ -36,22 +36,18 @@ def test_compute_spend_hints_and_additions() -> None: expected_dict = {hinted_coin.coin.name(): hinted_coin for hinted_coin in hinted_coins} assert compute_spend_hints_and_additions(coin_spend)[0] == expected_dict - not_hinted_coin = HintedCoin(Coin(parent_coin.coin.name(), bytes32([0] * 32), uint64(0)), None) + not_hinted_coin = HintedCoin(Coin(parent_coin.coin.name(), bytes32.zeros, uint64(0)), None) assert compute_spend_hints_and_additions( - make_spend(parent_coin.coin, Program.to(1), Program.to([[51, bytes32([0] * 32), 0, [["not", "a"], "hint"]]])) + make_spend(parent_coin.coin, Program.to(1), Program.to([[51, bytes32.zeros, 0, [["not", "a"], "hint"]]])) )[0] == {not_hinted_coin.coin.name(): not_hinted_coin} with pytest.raises(ValidationError): compute_spend_hints_and_additions( - make_spend( - parent_coin.coin, Program.to(1), Program.to([[51, bytes32([0] * 32), 0] for _ in range(0, 10000)]) - ) + make_spend(parent_coin.coin, Program.to(1), Program.to([[51, bytes32.zeros, 0] for _ in range(0, 10000)])) ) with 
pytest.raises(ValidationError): compute_spend_hints_and_additions( - make_spend( - parent_coin.coin, Program.to(1), Program.to([[50, bytes48([0] * 48), b""] for _ in range(0, 10000)]) - ) + make_spend(parent_coin.coin, Program.to(1), Program.to([[50, bytes48.zeros, b""] for _ in range(0, 10000)])) ) @@ -66,7 +62,7 @@ def test_cs_config() -> None: "min_coin_amount": 50, } coin_to_exclude = CoinGenerator().get().coin - coin_id_to_exclude = bytes32([0] * 32) + coin_id_to_exclude = bytes32.zeros assert CoinSelectionConfigLoader.from_json_dict( { "excluded_coins": [coin_to_exclude.to_json_dict()], @@ -112,15 +108,15 @@ def test_list_to_binary_tree() -> None: "serializations", [ (tuple(), Program.to(None), []), - ((bytes32([0] * 32),), Program.to([bytes32([0] * 32)]), [LineageProofField.PARENT_NAME]), + ((bytes32.zeros,), Program.to([bytes32.zeros]), [LineageProofField.PARENT_NAME]), ( - (bytes32([0] * 32), bytes32([0] * 32)), - Program.to([bytes32([0] * 32), bytes32([0] * 32)]), + (bytes32.zeros, bytes32.zeros), + Program.to([bytes32.zeros, bytes32.zeros]), [LineageProofField.PARENT_NAME, LineageProofField.INNER_PUZZLE_HASH], ), ( - (bytes32([0] * 32), bytes32([0] * 32), uint64(0)), - Program.to([bytes32([0] * 32), bytes32([0] * 32), uint64(0)]), + (bytes32.zeros, bytes32.zeros, uint64(0)), + Program.to([bytes32.zeros, bytes32.zeros, uint64(0)]), [LineageProofField.PARENT_NAME, LineageProofField.INNER_PUZZLE_HASH, LineageProofField.AMOUNT], ), ], @@ -135,31 +131,31 @@ def test_lineage_proof_varargs(serializations: tuple[tuple[Any, ...], Program, l "serializations", [ ({}, Program.to(None), []), - ({"parent_name": bytes32([0] * 32)}, Program.to([bytes32([0] * 32)]), [LineageProofField.PARENT_NAME]), + ({"parent_name": bytes32.zeros}, Program.to([bytes32.zeros]), [LineageProofField.PARENT_NAME]), ( - {"parent_name": bytes32([0] * 32), "inner_puzzle_hash": bytes32([0] * 32)}, - Program.to([bytes32([0] * 32), bytes32([0] * 32)]), + {"parent_name": bytes32.zeros, "inner_puzzle_hash": bytes32.zeros}, + Program.to([bytes32.zeros, bytes32.zeros]), [LineageProofField.PARENT_NAME, LineageProofField.INNER_PUZZLE_HASH], ), ( - {"parent_name": bytes32([0] * 32), "inner_puzzle_hash": bytes32([0] * 32), "amount": uint64(0)}, - Program.to([bytes32([0] * 32), bytes32([0] * 32), uint64(0)]), + {"parent_name": bytes32.zeros, "inner_puzzle_hash": bytes32.zeros, "amount": uint64(0)}, + Program.to([bytes32.zeros, bytes32.zeros, uint64(0)]), [LineageProofField.PARENT_NAME, LineageProofField.INNER_PUZZLE_HASH, LineageProofField.AMOUNT], ), ( - {"parent_name": bytes32([0] * 32), "amount": uint64(0)}, - Program.to([bytes32([0] * 32), uint64(0)]), + {"parent_name": bytes32.zeros, "amount": uint64(0)}, + Program.to([bytes32.zeros, uint64(0)]), [LineageProofField.PARENT_NAME, LineageProofField.AMOUNT], ), ( - {"inner_puzzle_hash": bytes32([0] * 32), "amount": uint64(0)}, - Program.to([bytes32([0] * 32), uint64(0)]), + {"inner_puzzle_hash": bytes32.zeros, "amount": uint64(0)}, + Program.to([bytes32.zeros, uint64(0)]), [LineageProofField.INNER_PUZZLE_HASH, LineageProofField.AMOUNT], ), ({"amount": uint64(0)}, Program.to([uint64(0)]), [LineageProofField.AMOUNT]), ( - {"inner_puzzle_hash": bytes32([0] * 32)}, - Program.to([bytes32([0] * 32)]), + {"inner_puzzle_hash": bytes32.zeros}, + Program.to([bytes32.zeros]), [LineageProofField.INNER_PUZZLE_HASH], ), ], @@ -174,7 +170,7 @@ def test_lineage_proof_errors() -> None: with pytest.raises(ValueError, match="Mismatch"): LineageProof.from_program(Program.to([]), 
[LineageProofField.PARENT_NAME]) with pytest.raises(StopIteration): - LineageProof.from_program(Program.to([bytes32([0] * 32)]), []) + LineageProof.from_program(Program.to([bytes32.zeros]), []) with pytest.raises(ValueError): LineageProof.from_program(Program.to([bytes32([1] * 32)]), [LineageProofField.AMOUNT]) with pytest.raises(ValueError): diff --git a/chia/_tests/wallet/test_wallet.py b/chia/_tests/wallet/test_wallet.py index 21302c456f32..5cb4753f09a0 100644 --- a/chia/_tests/wallet/test_wallet.py +++ b/chia/_tests/wallet/test_wallet.py @@ -78,7 +78,7 @@ async def test_wallet_make_transaction(self, wallet_environments: WalletTestFram async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await wallet.generate_signed_transaction( uint64(tx_amount), - bytes32([0] * 32), + bytes32.zeros, action_scope, uint64(0), ) @@ -133,7 +133,7 @@ async def test_wallet_reuse_address(self, wallet_environments: WalletTestFramewo ) as action_scope: await wallet.generate_signed_transaction( uint64(tx_amount), - bytes32([0] * 32), + bytes32.zeros, action_scope, uint64(0), ) @@ -777,7 +777,7 @@ async def test_wallet_clawback_reorg(self, wallet_environments: WalletTestFramew height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 2), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 2), uint32(height + 1), bytes32.zeros, None) ) await time_out_assert(20, wsm.coin_store.count_small_unspent, 0, 1000, CoinType.CLAWBACK) @@ -859,7 +859,7 @@ async def test_wallet_clawback_reorg(self, wallet_environments: WalletTestFramew height = full_node_api.full_node.blockchain.get_peak_height() assert height is not None await full_node_api.reorg_from_index_to_new_index( - ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32([0] * 32), None) + ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await time_out_assert(20, wsm.coin_store.count_small_unspent, 1, 1000, CoinType.CLAWBACK) @@ -916,7 +916,7 @@ async def test_get_clawback_coins(self, wallet_environments: WalletTestFramework async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: await wallet.generate_signed_transaction( uint64(tx_amount), - bytes32([0] * 32), + bytes32.zeros, action_scope, uint64(0), puzzle_decorator_override=[{"decorator": "CLAWBACK", "clawback_timelock": 500}], @@ -1666,7 +1666,7 @@ async def test_wallet_prevent_fee_theft(self, wallet_environments: WalletTestFra async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=False) as action_scope: await wallet.generate_signed_transaction( uint64(tx_amount), - bytes32([0] * 32), + bytes32.zeros, action_scope, uint64(tx_fee), ) @@ -2042,7 +2042,7 @@ async def test_wallet_transaction_options(self, wallet_environments: WalletTestF coin_list = list(coins) await wallet.generate_signed_transaction( uint64(AMOUNT_TO_SEND), - bytes32([0] * 32), + bytes32.zeros, action_scope, uint64(0), coins=coins, diff --git a/chia/_tests/wallet/test_wallet_action_scope.py b/chia/_tests/wallet/test_wallet_action_scope.py index 2ac0e8d56fba..e72b405c025d 100644 --- a/chia/_tests/wallet/test_wallet_action_scope.py +++ b/chia/_tests/wallet/test_wallet_action_scope.py @@ -17,9 +17,9 @@ from chia.wallet.wallet_spend_bundle import WalletSpendBundle from chia.wallet.wallet_state_manager import WalletStateManager -MOCK_SR = SigningResponse(b"hey", 
bytes32([0] * 32)) +MOCK_SR = SigningResponse(b"hey", bytes32.zeros) MOCK_SB = WalletSpendBundle([], G2Element()) -MOCK_COIN = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) +MOCK_COIN = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) def test_back_and_forth_serialization() -> None: diff --git a/chia/_tests/wallet/test_wallet_node.py b/chia/_tests/wallet/test_wallet_node.py index 16f3779f94e5..3997502241ba 100644 --- a/chia/_tests/wallet/test_wallet_node.py +++ b/chia/_tests/wallet/test_wallet_node.py @@ -638,7 +638,7 @@ async def send_transaction( # Generate the transaction async with wallet.wallet_state_manager.new_action_scope(DEFAULT_TX_CONFIG, push=True) as action_scope: - await wallet.generate_signed_transaction(uint64(0), bytes32([0] * 32), action_scope) + await wallet.generate_signed_transaction(uint64(0), bytes32.zeros, action_scope) [tx] = action_scope.side_effects.transactions # Make sure it is sent to the peer @@ -695,7 +695,7 @@ async def register_interest_in_coin( return make_msg( ProtocolMessageTypes.respond_to_coin_update, wallet_protocol.RespondToCoinUpdates( - [], uint32(0), [CoinState(Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)), uint32(0), uint32(0))] + [], uint32(0), [CoinState(Coin(bytes32.zeros, bytes32.zeros, uint64(0)), uint32(0), uint32(0))] ), ) diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index e0df7b793325..481173c0bd51 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -126,13 +126,13 @@ async def test_commit_transactions_to_db(wallet_environments: WalletTestFramewor coins = list(await wsm.main_wallet.select_coins(uint64(2_000_000_000_000), action_scope)) await wsm.main_wallet.generate_signed_transaction( uint64(0), - bytes32([0] * 32), + bytes32.zeros, action_scope, coins={coins[0]}, ) await wsm.main_wallet.generate_signed_transaction( uint64(0), - bytes32([0] * 32), + bytes32.zeros, action_scope, coins={coins[1]}, ) diff --git a/chia/_tests/wallet/test_wallet_trade_store.py b/chia/_tests/wallet/test_wallet_trade_store.py index d0267c3ab9be..ea34b546ff9c 100644 --- a/chia/_tests/wallet/test_wallet_trade_store.py +++ b/chia/_tests/wallet/test_wallet_trade_store.py @@ -133,7 +133,7 @@ async def test_valid_times_migration() -> None: ) fake_offer = Offer({}, WalletSpendBundle([], G2Element()), {}) - fake_coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + fake_coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) old_record = TradeRecordOld( confirmed_at_index=uint32(0), accepted_at_time=None, @@ -143,7 +143,7 @@ async def test_valid_times_migration() -> None: offer=bytes(fake_offer), taken_offer=None, coins_of_interest=[fake_coin], - trade_id=bytes32([0] * 32), + trade_id=bytes32.zeros, status=uint32(TradeStatus.PENDING_ACCEPT.value), sent_to=[], ) diff --git a/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py b/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py index 4bb7aa6dfc4f..230df2f9c649 100644 --- a/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +++ b/chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py @@ -20,7 +20,7 @@ def test_cat_outer_puzzle() -> None: - authorized_providers: list[bytes32] = [bytes32([0] * 32), bytes32([0] * 32)] + authorized_providers: list[bytes32] = [bytes32.zeros, bytes32.zeros] proofs_checker: Program = Program.to(None) ACS: Program = Program.to(1) cr_puzzle: Program = construct_cr_layer(authorized_providers, proofs_checker, ACS) @@ -42,7 +42,7 @@ def 
test_cat_outer_puzzle() -> None: assert create_asset_id(cr_driver) is None # Set up for solve - coin: Coin = Coin(bytes32([0] * 32), bytes32([0] * 32), uint64(0)) + coin: Coin = Coin(bytes32.zeros, bytes32.zeros, uint64(0)) coin_as_hex: str = ( "0x" + coin.parent_coin_info.hex() + coin.puzzle_hash.hex() + uint64(coin.amount).stream_to_bytes().hex() ) diff --git a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py index aaf29a6f3abe..6cce396c9fc7 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py @@ -47,7 +47,7 @@ ACS_2_PH: bytes32 = ACS_2.get_tree_hash() MOCK_SINGLETON_MOD: Program = Program.to([2, 5, 11]) MOCK_SINGLETON_MOD_HASH: bytes32 = MOCK_SINGLETON_MOD.get_tree_hash() -MOCK_LAUNCHER_ID: bytes32 = bytes32([0] * 32) +MOCK_LAUNCHER_ID: bytes32 = bytes32.zeros MOCK_LAUNCHER_HASH: bytes32 = bytes32([1] * 32) MOCK_SINGLETON: Program = MOCK_SINGLETON_MOD.curry( (MOCK_SINGLETON_MOD_HASH, (MOCK_LAUNCHER_ID, MOCK_LAUNCHER_HASH)), @@ -210,7 +210,7 @@ async def test_did_tp(cost_logger: CostLogger) -> None: my_coin_id: bytes32 = eml_coin.name() new_metadata: Program = Program.to("SUCCESS") new_tp_hash = Program.to("NEW TP").get_tree_hash() - bad_data: bytes32 = bytes32([0] * 32) + bad_data: bytes32 = bytes32.zeros # Try to update metadata and tp without any announcement result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( @@ -354,7 +354,7 @@ async def test_viral_backdoor(cost_logger: CostLogger) -> None: p2_either_puzzle, solve_viral_backdoor( hidden_puzzle, - Program.to(bytes32([0] * 32)), + Program.to(bytes32.zeros), hidden=True, ), ) @@ -365,7 +365,7 @@ async def test_viral_backdoor(cost_logger: CostLogger) -> None: assert result == (MempoolInclusionStatus.FAILED, Err.ASSERT_ANNOUNCE_CONSUMED_FAILED) # Spend the inner puzzle - brick_hash: bytes32 = bytes32([0] * 32) + brick_hash: bytes32 = bytes32.zeros wrapped_brick_hash: bytes32 = create_viral_backdoor( hidden_puzzle_hash, brick_hash, @@ -504,7 +504,7 @@ async def test_vc_lifecycle(test_syncing: bool, cost_logger: CostLogger) -> None [vc_fund_coin], launcher_id, ACS_PH, - [bytes32([0] * 32)], + [bytes32.zeros], ) result: tuple[MempoolInclusionStatus, Optional[Err]] = await client.push_tx( cost_logger.add_cost( diff --git a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py index 893ef54cfc67..701d44be501f 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_wallet.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_wallet.py @@ -699,7 +699,7 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: await (await wallet_node_0.wallet_state_manager.get_or_create_vc_wallet()).generate_signed_transaction( new_vc_record.vc.launcher_id, action_scope, - new_proof_hash=bytes32([0] * 32), + new_proof_hash=bytes32.zeros, self_revoke=True, ) @@ -707,7 +707,7 @@ async def test_self_revoke(wallet_environments: WalletTestFramework) -> None: async with did_wallet.wallet_state_manager.new_action_scope( wallet_environments.tx_config, push=True ) as action_scope: - await did_wallet.transfer_did(bytes32([0] * 32), uint64(0), False, action_scope) + await did_wallet.transfer_did(bytes32.zeros, uint64(0), False, action_scope) await wallet_environments.process_pending_states( [ @@ -788,7 +788,7 @@ async def test_cat_wallet_conversion( wallet_node_0.wallet_state_manager, wallet_0, Program.to(None).get_tree_hash().hex() ) - did_id = bytes32([0] * 32) + did_id = 
bytes32.zeros await mint_cr_cat(num_blocks, wallet_0, wallet_node_0, client_0, full_node_api, [did_id]) await full_node_api.farm_blocks_to_wallet(count=num_blocks, wallet=wallet_0) await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, timeout=20) diff --git a/chia/_tests/wallet/wallet_block_tools.py b/chia/_tests/wallet/wallet_block_tools.py index 2fded0a56b40..577bbbb6a7eb 100644 --- a/chia/_tests/wallet/wallet_block_tools.py +++ b/chia/_tests/wallet/wallet_block_tools.py @@ -30,14 +30,14 @@ from chia.util.ints import uint8, uint32, uint64, uint128 DEFAULT_PROOF_OF_SPACE = ProofOfSpace( - bytes32([0] * 32), + bytes32.zeros, G1Element(), None, G1Element(), uint8(20), bytes(32 * 5), ) -DEFAULT_VDF_INFO = VDFInfo(bytes32([0] * 32), uint64(1), ClassgroupElement(bytes100([0] * 100))) +DEFAULT_VDF_INFO = VDFInfo(bytes32.zeros, uint64(1), ClassgroupElement(bytes100.zeros)) DEFAULT_VDF_PROOF = VDFProof(uint8(0), bytes(100), False) @@ -170,7 +170,7 @@ def finish_block( new_height, uint128(1), uint8(1), - bytes32([0] * 32), + bytes32.zeros, unfinished_block.reward_chain_block.proof_of_space, DEFAULT_VDF_INFO, G2Element(), @@ -263,21 +263,21 @@ def get_full_block_and_block_record( additions_root = bytes32(compute_merkle_set_root(additions_merkle_items)) removals_root = bytes32(compute_merkle_set_root(removal_ids)) - generator_hash = bytes32([0] * 32) + generator_hash = bytes32.zeros if block_generator is not None: generator_hash = std_hash(block_generator.program) foliage_data = FoliageBlockData( - bytes32([0] * 32), + bytes32.zeros, pool_target, G2Element(), farmer_reward_puzzlehash, - bytes32([0] * 32), + bytes32.zeros, ) transactions_info = TransactionsInfo( generator_hash, - bytes32([0] * 32), + bytes32.zeros, G2Element(), fees, uint64(constants.MAX_BLOCK_COST_CLVM), @@ -295,7 +295,7 @@ def get_full_block_and_block_record( foliage = Foliage( prev_block_hash, - bytes32([0] * 32), + bytes32.zeros, foliage_data, G2Element(), foliage_transaction_block.get_hash(), @@ -306,7 +306,7 @@ def get_full_block_and_block_record( RewardChainBlockUnfinished( uint128(1), uint8(1), - bytes32([0] * 32), + bytes32.zeros, DEFAULT_PROOF_OF_SPACE, None, G2Element(), diff --git a/chia/consensus/block_creation.py b/chia/consensus/block_creation.py index 66b28e0f8b7e..0d69fa716b35 100644 --- a/chia/consensus/block_creation.py +++ b/chia/consensus/block_creation.py @@ -222,7 +222,7 @@ def create_foliage( additions_root = bytes32(compute_merkle_set_root(additions_merkle_items)) removals_root = bytes32(compute_merkle_set_root(tx_removals)) - generator_hash = bytes32([0] * 32) + generator_hash = bytes32.zeros if block_generator is not None: generator_hash = std_hash(block_generator.program) diff --git a/chia/consensus/block_header_validation.py b/chia/consensus/block_header_validation.py index a450e2f4e989..e2e5f3c19b67 100644 --- a/chia/consensus/block_header_validation.py +++ b/chia/consensus/block_header_validation.py @@ -574,14 +574,14 @@ def validate_unfinished_header_block( sp_total_iters: uint128 = uint128(total_iters - ip_iters + sp_iters - (expected_sub_slot_iters if overflow else 0)) if overflow and skip_overflow_last_ss_validation: dummy_vdf_info = VDFInfo( - bytes32([0] * 32), + bytes32.zeros, uint64(1), ClassgroupElement.get_default_element(), ) dummy_sub_slot = EndOfSubSlotBundle( ChallengeChainSubSlot(dummy_vdf_info, None, None, None, None), None, - RewardChainSubSlot(dummy_vdf_info, bytes32([0] * 32), None, uint8(0)), + RewardChainSubSlot(dummy_vdf_info, bytes32.zeros, None, uint8(0)), 
SubSlotProofs(VDFProof(uint8(0), b"", False), None, VDFProof(uint8(0), b"", False)), ) sub_slots_to_pass_in = header_block.finished_sub_slots + [dummy_sub_slot] diff --git a/chia/data_layer/data_layer.py b/chia/data_layer/data_layer.py index f55e879fcb27..3aef5354ac76 100644 --- a/chia/data_layer/data_layer.py +++ b/chia/data_layer/data_layer.py @@ -104,7 +104,7 @@ class DataLayer: server_files_location: Path unsubscribe_data_queue: list[UnsubscribeData] _server: Optional[ChiaServer] = None - none_bytes: bytes32 = bytes32([0] * 32) + none_bytes: bytes32 = bytes32.zeros initialized: bool = False _data_store: Optional[DataStore] = None state_changed_callback: Optional[StateChangedProtocol] = None @@ -237,9 +237,7 @@ async def wallet_log_in(self, fingerprint: int) -> int: return result.fingerprint - async def create_store( - self, fee: uint64, root: bytes32 = bytes32([0] * 32) - ) -> tuple[list[TransactionRecord], bytes32]: + async def create_store(self, fee: uint64, root: bytes32 = bytes32.zeros) -> tuple[list[TransactionRecord], bytes32]: txs, store_id = await self.wallet_rpc.create_new_dl(root, fee) res = await self.data_store.create_tree(store_id=store_id) if res is None: diff --git a/chia/data_layer/data_layer_wallet.py b/chia/data_layer/data_layer_wallet.py index 76195d2f4b8b..888d8b768b2e 100644 --- a/chia/data_layer/data_layer_wallet.py +++ b/chia/data_layer/data_layer_wallet.py @@ -1108,9 +1108,7 @@ async def make_update_offer( # create some dummy requested payments requested_payments = { - k: [NotarizedPayment(bytes32([0] * 32), uint64(v), [], bytes32([0] * 32))] - for k, v in offer_dict.items() - if v > 0 + k: [NotarizedPayment(bytes32.zeros, uint64(v), [], bytes32.zeros)] for k, v in offer_dict.items() if v > 0 } async with action_scope.use() as interface: diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index 2132728bebca..ead6f196cfd2 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -924,11 +924,11 @@ async def get_kv_diff_paginated( hash2: bytes32, ) -> KVDiffPaginationData: old_pairs = await self.get_keys_values_compressed(store_id, hash1) - if len(old_pairs.keys_values_hashed) == 0 and hash1 != bytes32([0] * 32): + if len(old_pairs.keys_values_hashed) == 0 and hash1 != bytes32.zeros: raise Exception(f"Unable to diff: Can't find keys and values for {hash1}") new_pairs = await self.get_keys_values_compressed(store_id, hash2) - if len(new_pairs.keys_values_hashed) == 0 and hash2 != bytes32([0] * 32): + if len(new_pairs.keys_values_hashed) == 0 and hash2 != bytes32.zeros: raise Exception(f"Unable to diff: Can't find keys and values for {hash2}") old_pairs_leaf_hashes = {v for v in old_pairs.keys_values_hashed.values()} @@ -1960,7 +1960,7 @@ async def write_tree_to_file( deltas_only: bool, writer: BinaryIO, ) -> None: - if node_hash == bytes32([0] * 32): + if node_hash == bytes32.zeros: return if deltas_only: @@ -2248,11 +2248,11 @@ async def get_kv_diff( ) -> set[DiffData]: async with self.db_wrapper.reader(): old_pairs = set(await self.get_keys_values(store_id, hash_1)) - if len(old_pairs) == 0 and hash_1 != bytes32([0] * 32): + if len(old_pairs) == 0 and hash_1 != bytes32.zeros: raise Exception(f"Unable to diff: Can't find keys and values for {hash_1}") new_pairs = set(await self.get_keys_values(store_id, hash_2)) - if len(new_pairs) == 0 and hash_2 != bytes32([0] * 32): + if len(new_pairs) == 0 and hash_2 != bytes32.zeros: raise Exception(f"Unable to diff: Can't find keys and values for {hash_2}") insertions = { diff 
--git a/chia/data_layer/download_data.py b/chia/data_layer/download_data.py index 8c3a5e8d152c..9b210123995c 100644 --- a/chia/data_layer/download_data.py +++ b/chia/data_layer/download_data.py @@ -145,7 +145,7 @@ async def write_files_for_root( if root.node_hash is not None: node_hash = root.node_hash else: - node_hash = bytes32([0] * 32) # todo change + node_hash = bytes32.zeros # todo change filename_full_tree = get_full_tree_filename_path(foldername, store_id, node_hash, root.generation, group_by_store) filename_diff_tree = get_delta_filename_path(foldername, store_id, node_hash, root.generation, group_by_store) @@ -291,7 +291,7 @@ async def insert_from_delta_file( num_inserted = await insert_into_data_store_from_file( data_store, store_id, - None if root_hash == bytes32([0] * 32) else root_hash, + None if root_hash == bytes32.zeros else root_hash, target_filename_path, ) log.info( @@ -336,7 +336,7 @@ def delete_full_file_if_exists(foldername: Path, store_id: bytes32, root: Root) if root.node_hash is not None: node_hash = root.node_hash else: - node_hash = bytes32([0] * 32) # todo change + node_hash = bytes32.zeros # todo change not_found = 0 for group_by_store in (True, False): diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index f30d45460a1a..b47715b79393 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2378,7 +2378,7 @@ async def add_end_of_sub_slot( full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot( end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge, uint8(0), - bytes32([0] * 32), + bytes32.zeros, ) return ( make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request), diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index ab0d8853a6a8..50cef57c2fff 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -1008,7 +1008,7 @@ def get_pool_sig(_1: PoolTarget, _2: Optional[G1Element]) -> Optional[G2Element] if unfinished_block.is_transaction_block(): foliage_transaction_block_hash = unfinished_block.foliage.foliage_transaction_block_hash else: - foliage_transaction_block_hash = bytes32([0] * 32) + foliage_transaction_block_hash = bytes32.zeros assert foliage_transaction_block_hash is not None foliage_block_data: Optional[FoliageBlockData] = None diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py index 97bdf95b1848..f7f7ca308e32 100644 --- a/chia/rpc/data_layer_rpc_api.py +++ b/chia/rpc/data_layer_rpc_api.py @@ -199,7 +199,7 @@ async def get_keys(self, request: dict[str, Any]) -> EndpointResult: keys = keys_paginated.keys # NOTE: here we do support zeros as the empty root - if keys == [] and resolved_root_hash is not unspecified and resolved_root_hash != bytes32([0] * 32): + if keys == [] and resolved_root_hash is not unspecified and resolved_root_hash != bytes32.zeros: raise Exception(f"Can't find keys for {resolved_root_hash}") response: EndpointResult = {"keys": [f"0x{key.hex()}" for key in keys]} @@ -239,7 +239,7 @@ async def get_keys_values(self, request: dict[str, Any]) -> EndpointResult: json_nodes = [recurse_jsonify(dataclasses.asdict(node)) for node in keys_values] # NOTE: here we do support zeros as the empty root - if not json_nodes and resolved_root_hash is not unspecified and resolved_root_hash != bytes32([0] * 32): + if not json_nodes and resolved_root_hash is not unspecified and resolved_root_hash != bytes32.zeros: raise Exception(f"Can't find keys and 
values for {resolved_root_hash}") response: EndpointResult = {"keys_values": json_nodes} diff --git a/chia/simulator/block_tools.py b/chia/simulator/block_tools.py index 809f1419c35b..6a82f64aa678 100644 --- a/chia/simulator/block_tools.py +++ b/chia/simulator/block_tools.py @@ -205,7 +205,7 @@ def __init__( plot_dir: str = "test-plots", log: logging.Logger = logging.getLogger(__name__), ) -> None: - self._block_cache_header = bytes32([0] * 32) + self._block_cache_header = bytes32.zeros self._tempdir = None if root_path is None: diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index e0330a691c0b..b791b8ecc611 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -309,7 +309,7 @@ async def reorg_from_index_to_new_index(self, request: ReorgProtocol): async def farm_blocks_to_puzzlehash( self, count: int, - farm_to: bytes32 = bytes32([0] * 32), + farm_to: bytes32 = bytes32.zeros, guarantee_transaction_blocks: bool = False, timeout: Union[None, _Default, float] = default, _wait_for_synced: bool = True, diff --git a/chia/wallet/dao_wallet/dao_wallet.py b/chia/wallet/dao_wallet/dao_wallet.py index 2c8fb09935aa..2b04cf4dea61 100644 --- a/chia/wallet/dao_wallet/dao_wallet.py +++ b/chia/wallet/dao_wallet/dao_wallet.py @@ -152,7 +152,7 @@ async def create_new_dao_and_wallet( raise ValueError(f"Your balance of {bal} mojos is not enough to create {amount_of_cats} CATs") self.dao_info = DAOInfo( - treasury_id=bytes32([0] * 32), + treasury_id=bytes32.zeros, cat_wallet_id=uint32(0), dao_cat_wallet_id=uint32(0), proposals_list=[], diff --git a/chia/wallet/did_wallet/did_wallet.py b/chia/wallet/did_wallet/did_wallet.py index bb3ae4ed6902..1d6afd4c0f69 100644 --- a/chia/wallet/did_wallet/did_wallet.py +++ b/chia/wallet/did_wallet/did_wallet.py @@ -530,14 +530,14 @@ def puzzle_for_pk(self, pubkey: G1Element) -> Program: return create_singleton_puzzle(innerpuz, self.did_info.origin_coin.name()) else: innerpuz = Program.to((8, 0)) - return create_singleton_puzzle(innerpuz, bytes32([0] * 32)) + return create_singleton_puzzle(innerpuz, bytes32.zeros) def puzzle_hash_for_pk(self, pubkey: G1Element) -> bytes32: if self.did_info.origin_coin is None: # TODO: this seem dumb. Why bother with this case? Is it ever used? # inner puzzle: (8 . 
0) innerpuz_hash = shatree_pair(shatree_int(8), NIL_TREEHASH) - return create_singleton_puzzle_hash(innerpuz_hash, bytes32([0] * 32)) + return create_singleton_puzzle_hash(innerpuz_hash, bytes32.zeros) origin_coin_name = self.did_info.origin_coin.name() innerpuz_hash = did_wallet_puzzles.get_inner_puzhash_by_p2( p2_puzhash=puzzle_hash_for_pk(pubkey), diff --git a/chia/wallet/trading/offer.py b/chia/wallet/trading/offer.py index b8b7d5d816f9..89e9d3c38922 100644 --- a/chia/wallet/trading/offer.py +++ b/chia/wallet/trading/offer.py @@ -45,7 +45,6 @@ OFFER_MOD = load_clvm_maybe_recompile("settlement_payments.clsp") OFFER_MOD_HASH = OFFER_MOD.get_tree_hash() -ZERO_32 = bytes32([0] * 32) def detect_dependent_coin( @@ -63,7 +62,7 @@ def detect_dependent_coin( @dataclass(frozen=True) class NotarizedPayment(Payment): - nonce: bytes32 = ZERO_32 + nonce: bytes32 = bytes32.zeros @classmethod def from_condition_and_nonce(cls, condition: Program, nonce: bytes32) -> NotarizedPayment: @@ -592,7 +591,7 @@ def to_spend_bundle(self) -> WalletSpendBundle: additional_coin_spends.append( make_spend( Coin( - ZERO_32, + bytes32.zeros, puzzle_reveal.get_tree_hash(), uint64(0), ), @@ -624,7 +623,7 @@ def from_spend_bundle(cls, bundle: WalletSpendBundle) -> Offer: driver_dict[asset_id] = driver else: asset_id = None - if coin_spend.coin.parent_coin_info == ZERO_32: + if coin_spend.coin.parent_coin_info == bytes32.zeros: notarized_payments: list[NotarizedPayment] = [] for payment_group in coin_spend.solution.to_program().as_iter(): nonce = bytes32(payment_group.first().as_atom()) diff --git a/chia/wallet/vc_wallet/vc_drivers.py b/chia/wallet/vc_wallet/vc_drivers.py index 9ba68ec1e211..6f71074d1098 100644 --- a/chia/wallet/vc_wallet/vc_drivers.py +++ b/chia/wallet/vc_wallet/vc_drivers.py @@ -810,7 +810,7 @@ def _next_vc( Private method that creates the next VC class instance. """ slightly_incomplete_vc: VerifiedCredential = VerifiedCredential( - Coin(self.coin.name(), bytes32([0] * 32), next_amount), + Coin(self.coin.name(), bytes32.zeros, next_amount), LineageProof( self.coin.parent_coin_info, self.construct_exigent_metadata_layer().get_tree_hash(), diff --git a/chia/wallet/vc_wallet/vc_wallet.py b/chia/wallet/vc_wallet/vc_wallet.py index 56d77e813097..cca73ddd5a14 100644 --- a/chia/wallet/vc_wallet/vc_wallet.py +++ b/chia/wallet/vc_wallet/vc_wallet.py @@ -556,7 +556,7 @@ async def add_vc_authorization( *( spend for spend in offer.to_spend_bundle().coin_spends - if spend.coin.parent_coin_info == bytes32([0] * 32) + if spend.coin.parent_coin_info == bytes32.zeros ), *other_spends, ], From 690338ef39a50e620e7aaeeeca523f70627583e2 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Tue, 22 Oct 2024 20:07:52 +0100 Subject: [PATCH 51/69] CHIA-1663 Simplify duplicate output validation in validate_block_body (#18744) Simplify duplicate output validation in validate_block_body. --- chia/consensus/block_body_validation.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index c630e815282c..ba9d89d3c68c 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -359,8 +359,8 @@ async def validate_block_body( # 13. 
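This is the duplicate-output check the patch simplifies: only the multiplicities matter, not which coin name each count belongs to. A standalone sketch of the same test, with an equivalent set-based formulation for comparison:

    import collections

    coin_names = [b"a" * 32, b"b" * 32, b"a" * 32]

    addition_counter = collections.Counter(coin_names)
    has_duplicate = any(count > 1 for count in addition_counter.values())

    # same answer without building a Counter at all
    assert has_duplicate == (len(coin_names) != len(set(coin_names)))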
Check for duplicate outputs in additions addition_counter = collections.Counter(coin_name for _, coin_name in additions + coinbase_additions) - for k, v in addition_counter.items(): - if v > 1: + for count in addition_counter.values(): + if count > 1: return Err.DUPLICATE_OUTPUT, None # 14. Check for duplicate spends inside block From f15d0047a24874e76ab4dc5701a406f8b1b12fa2 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 22 Oct 2024 21:08:40 +0200 Subject: [PATCH 52/69] Move `spend_sim.py` into tests (#18743) * move spend_sim into tests * use run_block_generator2() instead of get_name_puzzle_conditions() in CostLogger. It's only used to compute the cost of a spend bundle, in tests. --- chia/_tests/clvm/test_singletons.py | 2 +- chia/_tests/clvm/test_spend_sim.py | 2 +- chia/_tests/conftest.py | 2 +- .../mempool/test_singleton_fast_forward.py | 2 +- .../test_mempoolitem_height_added.py | 2 +- chia/{clvm => _tests/util}/spend_sim.py | 19 +++++++++++-------- .../wallet/cat_wallet/test_cat_lifecycle.py | 2 +- .../wallet/cat_wallet/test_offer_lifecycle.py | 2 +- .../clawback/test_clawback_lifecycle.py | 2 +- .../_tests/wallet/dao_wallet/test_dao_clvm.py | 2 +- .../wallet/db_wallet/test_db_graftroot.py | 2 +- .../wallet/nft_wallet/test_nft_lifecycle.py | 2 +- .../wallet/vc_wallet/test_vc_lifecycle.py | 2 +- 13 files changed, 23 insertions(+), 20 deletions(-) rename chia/{clvm => _tests/util}/spend_sim.py (97%) diff --git a/chia/_tests/clvm/test_singletons.py b/chia/_tests/clvm/test_singletons.py index 8dc2f91d6e37..c2f418156341 100644 --- a/chia/_tests/clvm/test_singletons.py +++ b/chia/_tests/clvm/test_singletons.py @@ -7,7 +7,7 @@ from chia._tests.clvm.test_puzzles import public_key_for_index, secret_exponent_for_index from chia._tests.util.key_tool import KeyTool -from chia.clvm.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program diff --git a/chia/_tests/clvm/test_spend_sim.py b/chia/_tests/clvm/test_spend_sim.py index 2896070972ec..ea7b9a08b367 100644 --- a/chia/_tests/clvm/test_spend_sim.py +++ b/chia/_tests/clvm/test_spend_sim.py @@ -3,7 +3,7 @@ import pytest from chia_rs import G2Element -from chia.clvm.spend_sim import sim_and_client +from chia._tests.util.spend_sim import sim_and_client from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import compute_additions, make_spend diff --git a/chia/_tests/conftest.py b/chia/_tests/conftest.py index eb6d5f2c8770..010d93ccdfc2 100644 --- a/chia/_tests/conftest.py +++ b/chia/_tests/conftest.py @@ -47,8 +47,8 @@ setup_simulators_and_wallets_service, setup_two_nodes, ) +from chia._tests.util.spend_sim import CostLogger from chia._tests.util.time_out_assert import time_out_assert -from chia.clvm.spend_sim import CostLogger from chia.consensus.constants import ConsensusConstants from chia.full_node.full_node import FullNode from chia.full_node.full_node_api import FullNodeAPI diff --git a/chia/_tests/core/mempool/test_singleton_fast_forward.py b/chia/_tests/core/mempool/test_singleton_fast_forward.py index f53b759886c9..8dae2e095bf8 100644 --- a/chia/_tests/core/mempool/test_singleton_fast_forward.py +++ b/chia/_tests/core/mempool/test_singleton_fast_forward.py @@ -17,7 +17,7 @@ 
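Patch 52 only moves spend_sim and rewires imports; the call sites are unchanged. A usage sketch of the relocated helper, written from memory of the common test pattern rather than from this diff, so treat the details as an assumption:

    from chia._tests.util.spend_sim import sim_and_client

    async def example() -> None:
        async with sim_and_client() as (sim, client):
            await sim.farm_block()  # assumed SpendSim API, as used throughout these tests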
spend_bundle_from_conditions, ) from chia._tests.util.key_tool import KeyTool -from chia.clvm.spend_sim import SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import SimClient, SpendSim, sim_and_client from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program diff --git a/chia/_tests/fee_estimation/test_mempoolitem_height_added.py b/chia/_tests/fee_estimation/test_mempoolitem_height_added.py index c883d65e5c39..62c7698541b7 100644 --- a/chia/_tests/fee_estimation/test_mempoolitem_height_added.py +++ b/chia/_tests/fee_estimation/test_mempoolitem_height_added.py @@ -6,7 +6,7 @@ import pytest from chia_rs import Coin, G2Element -from chia.clvm.spend_sim import SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import SimClient, SpendSim, sim_and_client from chia.consensus.constants import ConsensusConstants from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.full_node.bitcoin_fee_estimator import BitcoinFeeEstimator diff --git a/chia/clvm/spend_sim.py b/chia/_tests/util/spend_sim.py similarity index 97% rename from chia/clvm/spend_sim.py rename to chia/_tests/util/spend_sim.py index 548d8e594d93..0b4ce8cf9f4a 100644 --- a/chia/clvm/spend_sim.py +++ b/chia/_tests/util/spend_sim.py @@ -10,17 +10,17 @@ from typing import Any, Callable, Optional, TypeVar import anyio +from chia_rs import DONT_VALIDATE_SIGNATURE, G2Element, get_flags_for_height_and_constants, run_block_generator2 from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.consensus.coinbase import create_farmer_coin, create_pool_coin from chia.consensus.constants import ConsensusConstants -from chia.consensus.cost_calculator import NPCResult from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.full_node.bundle_tools import simple_solution_generator from chia.full_node.coin_store import CoinStore from chia.full_node.hint_store import HintStore from chia.full_node.mempool import Mempool -from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, get_puzzle_and_solution_for_coin +from chia.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin from chia.full_node.mempool_manager import MempoolManager from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import INFINITE_COST @@ -67,14 +67,17 @@ def __init__(self) -> None: def add_cost(self, descriptor: str, spend_bundle: T_SpendBundle) -> T_SpendBundle: program: BlockGenerator = simple_solution_generator(spend_bundle) - npc_result: NPCResult = get_name_puzzle_conditions( - program, + flags = get_flags_for_height_and_constants(DEFAULT_CONSTANTS.HARD_FORK_HEIGHT, DEFAULT_CONSTANTS) + err, conds = run_block_generator2( + bytes(program.program), + [], INFINITE_COST, - mempool_mode=True, - height=DEFAULT_CONSTANTS.HARD_FORK_HEIGHT, - constants=DEFAULT_CONSTANTS, + flags | DONT_VALIDATE_SIGNATURE, + G2Element(), + None, + DEFAULT_CONSTANTS, ) - cost = uint64(0 if npc_result.conds is None else npc_result.conds.cost) + cost = uint64(0 if conds is None else conds.cost) self.cost_dict[descriptor] = cost cost_to_subtract: int = 0 for cs in spend_bundle.coin_spends: diff --git a/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py b/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py index 702fccd24a12..00b279e95e2c 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +++ 
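The new add_cost() above computes cost via run_block_generator2() directly, skipping signature validation. The same computation, extracted as a standalone sketch (spend_bundle is assumed to come from the surrounding test):

    from chia_rs import DONT_VALIDATE_SIGNATURE, G2Element, get_flags_for_height_and_constants, run_block_generator2

    from chia.consensus.default_constants import DEFAULT_CONSTANTS
    from chia.full_node.bundle_tools import simple_solution_generator
    from chia.types.blockchain_format.program import INFINITE_COST
    from chia.util.ints import uint64

    def spend_bundle_cost(spend_bundle) -> uint64:
        program = simple_solution_generator(spend_bundle)
        flags = get_flags_for_height_and_constants(DEFAULT_CONSTANTS.HARD_FORK_HEIGHT, DEFAULT_CONSTANTS)
        err, conds = run_block_generator2(
            bytes(program.program),
            [],
            INFINITE_COST,
            flags | DONT_VALIDATE_SIGNATURE,
            G2Element(),
            None,
            DEFAULT_CONSTANTS,
        )
        # only the cost is of interest here, not validity
        return uint64(0 if conds is None else conds.cost)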
b/chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py @@ -9,7 +9,7 @@ from chia._tests.clvm.benchmark_costs import cost_of_spend_bundle from chia._tests.clvm.test_puzzles import secret_exponent_for_index from chia._tests.conftest import ConsensusMode -from chia.clvm.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from chia.types.coin_spend import make_spend diff --git a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py index d4d423fa17e4..bc976945a571 100644 --- a/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +++ b/chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py @@ -5,7 +5,7 @@ import pytest from chia_rs import G2Element -from chia.clvm.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.serialized_program import SerializedProgram diff --git a/chia/_tests/wallet/clawback/test_clawback_lifecycle.py b/chia/_tests/wallet/clawback/test_clawback_lifecycle.py index 6e25a54bd049..bdb5edd4f7f2 100644 --- a/chia/_tests/wallet/clawback/test_clawback_lifecycle.py +++ b/chia/_tests/wallet/clawback/test_clawback_lifecycle.py @@ -8,8 +8,8 @@ from chia._tests.clvm.benchmark_costs import cost_of_spend_bundle from chia._tests.clvm.test_puzzles import public_key_for_index, secret_exponent_for_index from chia._tests.util.key_tool import KeyTool +from chia._tests.util.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client from chia._tests.util.time_out_assert import time_out_assert -from chia.clvm.spend_sim import CostLogger, SimClient, SpendSim, sim_and_client from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.sized_bytes import bytes32 diff --git a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py index a7f5a6e4f92f..0f0ec7d20a0b 100644 --- a/chia/_tests/wallet/dao_wallet/test_dao_clvm.py +++ b/chia/_tests/wallet/dao_wallet/test_dao_clvm.py @@ -6,7 +6,7 @@ from chia_rs import AugSchemeMPL from clvm.casts import int_to_bytes -from chia.clvm.spend_sim import SimClient, SpendSim, sim_and_client +from chia._tests.util.spend_sim import SimClient, SpendSim, sim_and_client from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import INFINITE_COST, Program from chia.types.blockchain_format.sized_bytes import bytes32 diff --git a/chia/_tests/wallet/db_wallet/test_db_graftroot.py b/chia/_tests/wallet/db_wallet/test_db_graftroot.py index 9bbfa66d256a..7b0e01bec332 100644 --- a/chia/_tests/wallet/db_wallet/test_db_graftroot.py +++ b/chia/_tests/wallet/db_wallet/test_db_graftroot.py @@ -3,7 +3,7 @@ import pytest from chia_rs import G2Element -from chia.clvm.spend_sim import CostLogger, sim_and_client +from chia._tests.util.spend_sim import CostLogger, sim_and_client from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 diff --git 
a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py index 90ccdd91532f..52b146dd4f48 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py @@ -5,7 +5,7 @@ import pytest from chia_rs import G2Element -from chia.clvm.spend_sim import CostLogger, sim_and_client +from chia._tests.util.spend_sim import CostLogger, sim_and_client from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import make_spend diff --git a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py index 6cce396c9fc7..5ddeeeb7a7e3 100644 --- a/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +++ b/chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py @@ -6,7 +6,7 @@ import pytest from chia_rs import G2Element -from chia.clvm.spend_sim import CostLogger, sim_and_client +from chia._tests.util.spend_sim import CostLogger, sim_and_client from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 From a80d779fe6599a1769c3e727e95559f80d730cf5 Mon Sep 17 00:00:00 2001 From: Almog De Paz Date: Tue, 22 Oct 2024 22:14:18 +0300 Subject: [PATCH 53/69] refactor full_node add_block (#18584) * refactor * minor fixes * fix_tests * add finish sync to add_blocks_in_batches * add_blocks_in_batches updates wallets * test fixes * fix condition in add_block * add known block to cache * restore comment * remove init file from log dir * fix failed reorg test * revert known block info change * assert height in cat test * wait for wallet height in nft test * assert sync all height for all wallets * test_cat_creation assert height * rename vars * assert height in process states --- .../blockchain/blockchain_test_utils.py | 3 +- chia/_tests/blockchain/test_blockchain.py | 157 ++++++++++++++---- .../test_blockchain_transactions.py | 19 +-- .../full_node/stores/test_full_node_store.py | 17 +- chia/_tests/core/full_node/test_full_node.py | 32 +--- chia/_tests/core/test_db_conversion.py | 6 +- chia/_tests/core/test_db_validation.py | 6 +- chia/_tests/core/test_full_node_rpc.py | 4 +- chia/_tests/environments/wallet.py | 7 +- .../test_third_party_harvesters.py | 6 +- chia/_tests/process_junit.py | 4 +- chia/_tests/util/full_sync.py | 8 +- chia/_tests/util/misc.py | 65 +++----- chia/_tests/util/time_out_assert.py | 41 ++++- .../wallet/cat_wallet/test_cat_wallet.py | 6 +- .../wallet/nft_wallet/test_nft_wallet.py | 3 +- chia/_tests/wallet/sync/test_wallet_sync.py | 12 +- chia/consensus/blockchain.py | 90 ++-------- chia/full_node/full_node.py | 39 ++--- chia/simulator/full_node_simulator.py | 14 +- tools/test_full_sync.py | 17 +- 21 files changed, 311 insertions(+), 245 deletions(-) diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py index 816922a8a039..8c2217e685a8 100644 --- a/chia/_tests/blockchain/blockchain_test_utils.py +++ b/chia/_tests/blockchain/blockchain_test_utils.py @@ -100,7 +100,8 @@ async def _validate_and_add_block( raise AssertionError(f"Expected {expected_error} but got {Err(results.error)}") await check_block_store_invariant(blockchain) return None - + if fork_info is None: + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) if use_bls_cache: bls_cache = BLSCache(100) else: diff --git 
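Most of patch 53 is mechanical: add_block() no longer accepts fork_info=None, so every caller constructs the fork point explicitly. For a block that simply extends the current chain, the fork point is its parent; a hedged sketch of the pattern repeated throughout the test updates below (block, pre-validation result and ssi are assumed from context):

    from chia.consensus.block_body_validation import ForkInfo
    from chia.consensus.blockchain import Blockchain

    async def add_chain_extension(blockchain: Blockchain, block, pre_validation_result, ssi) -> None:
        fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash)
        _, err, _ = await blockchain.add_block(
            block, pre_validation_result, None, sub_slot_iters=ssi, fork_info=fork_info
        )
        assert err is None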
a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 15678c603834..ae86f87b1399 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -28,6 +28,7 @@ from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.coinbase import create_farmer_coin from chia.consensus.constants import ConsensusConstants +from chia.consensus.find_fork_point import lookup_fork_chain from chia.consensus.full_block_to_block_record import block_to_block_record from chia.consensus.get_block_generator import get_block_generator from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing @@ -1865,7 +1866,8 @@ async def test_pre_validation( assert res[n].error is None block = blocks_to_validate[n] start_rb = time.time() - result, err, _ = await empty_blockchain.add_block(block, res[n], None, ssi) + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + result, err, _ = await empty_blockchain.add_block(block, res[n], None, ssi, fork_info=fork_info) end_rb = time.time() times_rb.append(end_rb - start_rb) assert err is None @@ -1965,7 +1967,11 @@ async def test_conditions( ) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) - code, err, state_change = await b.add_block(blocks[-1], repl_preval_results, None, sub_slot_iters=ssi) + block = blocks[-1] + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + code, err, state_change = await b.add_block( + block, repl_preval_results, None, sub_slot_iters=ssi, fork_info=fork_info + ) assert code == AddBlockResult.NEW_PEAK assert err is None assert state_change is not None @@ -2086,7 +2092,11 @@ async def test_timelock_conditions( validate_signatures=True, ) assert pre_validation_results is not None - assert (await b.add_block(blocks[-1], pre_validation_results[0], None, sub_slot_iters=ssi))[0] == expected + block = blocks[-1] + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + assert (await b.add_block(block, pre_validation_results[0], None, sub_slot_iters=ssi, fork_info=fork_info))[ + 0 + ] == expected if expected == AddBlockResult.NEW_PEAK: # ensure coin was in fact spent @@ -2166,7 +2176,11 @@ async def test_aggsig_garbage( ) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) - res, error, state_change = await b.add_block(blocks[-1], repl_preval_results, None, sub_slot_iters=ssi) + block = blocks[-1] + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + res, error, state_change = await b.add_block( + block, repl_preval_results, None, sub_slot_iters=ssi, fork_info=fork_info + ) assert res == AddBlockResult.NEW_PEAK assert error is None assert state_change is not None and state_change.fork_height == uint32(2) @@ -2289,7 +2303,11 @@ async def test_ephemeral_timelock( validate_signatures=True, ) assert pre_validation_results is not None - assert (await b.add_block(blocks[-1], pre_validation_results[0], None, sub_slot_iters=ssi))[0] == expected + block = blocks[-1] + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + assert (await b.add_block(block, pre_validation_results[0], None, sub_slot_iters=ssi, fork_info=fork_info))[ + 0 + ] == 
expected if expected == AddBlockResult.NEW_PEAK: # ensure coin1 was in fact spent @@ -2629,12 +2647,15 @@ async def test_cost_exceeds_max( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING + block = blocks[-1] + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) err = ( await b.add_block( blocks[-1], PreValidationResult(None, uint64(1), npc_result.conds, True, uint32(0)), None, sub_slot_iters=ssi, + fork_info=fork_info, ) )[1] assert err in [Err.BLOCK_COST_EXCEEDS_MAX] @@ -2707,8 +2728,13 @@ async def test_invalid_cost_in_block( constants=bt.constants, ) ssi = b.constants.SUB_SLOT_ITERS_STARTING + fork_info = ForkInfo(block_2.height - 1, block_2.height - 1, block_2.prev_header_hash) _, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi + block_2, + PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), + None, + sub_slot_iters=ssi, + fork_info=fork_info, ) assert err == Err.INVALID_BLOCK_COST @@ -2736,8 +2762,13 @@ async def test_invalid_cost_in_block( height=softfork_height, constants=bt.constants, ) + fork_info = ForkInfo(block_2.height - 1, block_2.height - 1, block_2.prev_header_hash) _, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi + block_2, + PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), + None, + sub_slot_iters=ssi, + fork_info=fork_info, ) assert err == Err.INVALID_BLOCK_COST @@ -2766,9 +2797,13 @@ async def test_invalid_cost_in_block( npc_result = get_name_puzzle_conditions( block_generator, max_cost, mempool_mode=False, height=softfork_height, constants=bt.constants ) - + fork_info = ForkInfo(block_2.height - 1, block_2.height - 1, block_2.prev_header_hash) result, err, _ = await b.add_block( - block_2, PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), None, sub_slot_iters=ssi + block_2, + PreValidationResult(None, uint64(1), npc_result.conds, False, uint32(0)), + None, + sub_slot_iters=ssi, + fork_info=fork_info, ) assert err == Err.INVALID_BLOCK_COST @@ -3043,20 +3078,31 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo blocks_reorg = bt.get_consecutive_blocks( 1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_2 ) - - await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.UNKNOWN_UNSPENT) + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, blocks_reorg[-1], peak) + await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.UNKNOWN_UNSPENT, fork_info=fork_info) # Finally add the block to the fork (spending both in same bundle, this is ephemeral) agg = SpendBundle.aggregate([tx, tx_2]) blocks_reorg = bt.get_consecutive_blocks( 1, block_list_input=blocks_reorg[:-1], guarantee_transaction_block=True, transaction_data=agg ) - await _validate_and_add_block(b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN) + + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, blocks_reorg[-1], peak) + await _validate_and_add_block( + b, blocks_reorg[-1], expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) blocks_reorg = bt.get_consecutive_blocks( 1, block_list_input=blocks_reorg, guarantee_transaction_block=True, transaction_data=tx_2 ) - await _validate_and_add_block(b, blocks_reorg[-1], 
expected_error=Err.DOUBLE_SPEND_IN_FORK) + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, blocks_reorg[-1], peak) + await _validate_and_add_block(b, blocks_reorg[-1], expected_error=Err.DOUBLE_SPEND_IN_FORK, fork_info=fork_info) rewards_ph = wt.get_new_puzzlehash() blocks_reorg = bt.get_consecutive_blocks( @@ -3065,9 +3111,13 @@ async def test_double_spent_in_reorg(self, empty_blockchain: Blockchain, bt: Blo guarantee_transaction_block=True, farmer_reward_puzzle_hash=rewards_ph, ) + + peak = b.get_peak() + assert peak is not None + fork_info = await get_fork_info(b, blocks_reorg[-10], peak) for block in blocks_reorg[-10:]: await _validate_and_add_block_multi_result( - b, block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.NEW_PEAK] + b, block, expected_result=[AddBlockResult.ADDED_AS_ORPHAN, AddBlockResult.NEW_PEAK], fork_info=fork_info ) # ephemeral coin is spent @@ -3282,16 +3332,19 @@ async def test_get_tx_peak_reorg( assert maybe_header_hash(b.get_tx_peak()) == last_tx_block reorg_last_tx_block: Optional[bytes32] = None - + fork_block = blocks[9] + fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) blocks_reorg_chain = bt.get_consecutive_blocks(7, blocks[:10], seed=b"2") assert blocks_reorg_chain[reorg_point].is_transaction_block() is False for reorg_block in blocks_reorg_chain: if reorg_block.height < 10: await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) elif reorg_block.height < reorg_point: - await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN) + await _validate_and_add_block( + b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + ) elif reorg_block.height >= reorg_point: - await _validate_and_add_block(b, reorg_block) + await _validate_and_add_block(b, reorg_block, fork_info=fork_info) if reorg_block.is_transaction_block(): reorg_last_tx_block = reorg_block.header_hash @@ -3347,7 +3400,12 @@ async def test_long_reorg( assert pre_validation_results[i].error is None if (block.height % 100) == 0: print(f"main chain: {block.height:4} weight: {block.weight}") - (result, err, _) = await b.add_block(block, pre_validation_results[i], None, sub_slot_iters=ssi) + + fork_info: ForkInfo = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + assert fork_info is not None + (result, err, _) = await b.add_block( + block, pre_validation_results[i], None, sub_slot_iters=ssi, fork_info=fork_info + ) await check_block_store_invariant(b) assert err is None assert result == AddBlockResult.NEW_PEAK @@ -3372,7 +3430,7 @@ async def test_long_reorg( b.clean_block_records() first_peak = b.get_peak() - fork_info: Optional[ForkInfo] = None + fork_info2 = None for reorg_block in reorg_blocks: if (reorg_block.height % 100) == 0: peak = b.get_peak() @@ -3386,14 +3444,14 @@ async def test_long_reorg( if reorg_block.height < num_blocks_chain_2_start: await _validate_and_add_block(b, reorg_block, expected_result=AddBlockResult.ALREADY_HAVE_BLOCK) elif reorg_block.weight <= chain_1_weight: - if fork_info is None: - fork_info = ForkInfo(reorg_block.height - 1, reorg_block.height - 1, reorg_block.prev_header_hash) + if fork_info2 is None: + fork_info2 = ForkInfo(reorg_block.height - 1, reorg_block.height - 1, reorg_block.prev_header_hash) await _validate_and_add_block( - b, reorg_block, expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info + b, reorg_block, 
expected_result=AddBlockResult.ADDED_AS_ORPHAN, fork_info=fork_info2 ) elif reorg_block.weight > chain_1_weight: await _validate_and_add_block( - b, reorg_block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info + b, reorg_block, expected_result=AddBlockResult.NEW_PEAK, fork_info=fork_info2 ) # if these asserts fires, there was no reorg @@ -3546,9 +3604,10 @@ async def test_reorg_transaction(self, empty_blockchain: Blockchain, bt: BlockTo ) for block in blocks: await _validate_and_add_block(b, block) - + fork_block = blocks[11] + fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) for block in blocks_fork: - await _validate_and_add_block_no_error(b, block) + await _validate_and_add_block_no_error(b, block, fork_info=fork_info) @pytest.mark.anyio async def test_get_header_blocks_in_range_tx_filter(self, empty_blockchain: Blockchain, bt: BlockTools) -> None: @@ -3790,8 +3849,10 @@ async def test_chain_failed_rollback(empty_blockchain: Blockchain, bt: BlockTool await b.coin_store.rollback_to_block(2) print(f"{await b.coin_store.get_coin_record(spend_bundle.coin_spends[0].coin.name())}") + fork_block = blocks_reorg_chain[10 - 1] + fork_info = ForkInfo(fork_block.height, fork_block.height, fork_block.header_hash) with pytest.raises(ValueError, match="Invalid operation to set spent"): - await _validate_and_add_block(b, blocks_reorg_chain[-1]) + await _validate_and_add_block(b, blocks_reorg_chain[-1], fork_info=fork_info) peak = b.get_peak() assert peak is not None @@ -3899,7 +3960,9 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> ValidationState(ssi, diff, None), validate_signatures=False, ) - _, err, _ = await b.add_block(block1, preval[0], None, sub_slot_iters=ssi) + + fork_info = ForkInfo(block1.height - 1, block1.height - 1, block1.prev_header_hash) + _, err, _ = await b.add_block(block1, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None preval = await pre_validate_blocks_multiprocessing( b.constants, @@ -3910,7 +3973,9 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> ValidationState(ssi, diff, None), validate_signatures=False, ) - _, err, _ = await b.add_block(block2, preval[0], None, sub_slot_iters=ssi) + + fork_info = ForkInfo(block2.height - 1, block2.height - 1, block2.prev_header_hash) + _, err, _ = await b.add_block(block2, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None peak = b.get_peak() @@ -3947,7 +4012,8 @@ async def test_get_tx_peak(default_400_blocks: list[FullBlock], empty_blockchain last_tx_block_record = None for b, prevalidation_res in zip(test_blocks, res): assert bc.get_tx_peak() == last_tx_block_record - _, err, _ = await bc.add_block(b, prevalidation_res, None, sub_slot_iters=ssi) + fork_info = ForkInfo(b.height - 1, b.height - 1, b.prev_header_hash) + _, err, _ = await bc.add_block(b, prevalidation_res, None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None if b.is_transaction_block(): @@ -4076,3 +4142,36 @@ async def test_lookup_block_generators( b.clean_block_records() with pytest.raises(AssertionError): await b.lookup_block_generators(blocks_1[600].prev_header_hash, {uint32(3)}) + + +async def get_fork_info(blockchain: Blockchain, block: FullBlock, peak: BlockRecord) -> ForkInfo: + fork_chain, fork_hash = await lookup_fork_chain( + blockchain, + (peak.height, peak.header_hash), + (block.height - 1, block.prev_header_hash), + blockchain.constants, + ) + # now we know how long the fork is, and 
can compute the fork + # height. + fork_height = block.height - len(fork_chain) - 1 + fork_info = ForkInfo(fork_height, fork_height, fork_hash) + + # now run all the blocks of the fork to compute the additions + # and removals. They are recorded in the fork_info object + counter = 0 + start = time.monotonic() + for height in range(fork_info.fork_height + 1, block.height): + fork_block: Optional[FullBlock] = await blockchain.block_store.get_full_block(fork_chain[uint32(height)]) + assert fork_block is not None + assert fork_block.height - 1 == fork_info.peak_height + assert fork_block.height == 0 or fork_block.prev_header_hash == fork_info.peak_hash + await blockchain.run_single_block(fork_block, fork_info) + counter += 1 + end = time.monotonic() + log.info( + f"executed {counter} block generators in {end - start:2f} s. " + f"{len(fork_info.additions_since_fork)} additions, " + f"{len(fork_info.removals_since_fork)} removals" + ) + + return fork_info diff --git a/chia/_tests/blockchain/test_blockchain_transactions.py b/chia/_tests/blockchain/test_blockchain_transactions.py index aca7afb2b2ec..dfccb27f4aa4 100644 --- a/chia/_tests/blockchain/test_blockchain_transactions.py +++ b/chia/_tests/blockchain/test_blockchain_transactions.py @@ -7,6 +7,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.util.generator_tools_testing import run_and_get_removals_and_additions +from chia._tests.util.misc import add_blocks_in_batches from chia.full_node.full_node_api import FullNodeAPI from chia.protocols import wallet_protocol from chia.server.server import ChiaServer @@ -320,8 +321,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) coin_2 = None for coin in run_and_get_removals_and_additions( @@ -345,7 +345,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) coin_3 = None for coin in run_and_get_removals_and_additions( @@ -369,8 +369,7 @@ async def test_validate_blockchain_spend_reorg_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[5].prev_header_hash) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_cb_coin( @@ -382,9 +381,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( receiver_1_puzzlehash = WALLET_A_PUZZLE_HASHES[1] full_node_api_1, _, _, _, bt = two_nodes blocks = bt.get_consecutive_blocks(num_blocks, farmer_reward_puzzle_hash=coinbase_puzzlehash) - - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Spends a coinbase created in reorg new_blocks = bt.get_consecutive_blocks( @@ -395,8 +392,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( guarantee_transaction_block=True, ) - for block in new_blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(new_blocks, full_node_api_1.full_node, blocks[6].prev_header_hash) spend_block = new_blocks[-1] spend_coin = 
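Hypothetical usage of the get_fork_info() helper added above, matching how the reorg tests call it: compute the ForkInfo against the current peak, then validate the orphaned block with it.

    from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block
    from chia.consensus.blockchain import Blockchain
    from chia.types.full_block import FullBlock

    async def validate_reorg_block(b: Blockchain, reorg_block: FullBlock) -> None:
        peak = b.get_peak()
        assert peak is not None
        fork_info = await get_fork_info(b, reorg_block, peak)
        await _validate_and_add_block(b, reorg_block, fork_info=fork_info)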
None @@ -414,8 +410,7 @@ async def test_validate_blockchain_spend_reorg_cb_coin( transaction_data=spend_bundle, guarantee_transaction_block=True, ) - - await full_node_api_1.full_node.add_block(new_blocks[-1]) + await add_blocks_in_batches([new_blocks[-1]], full_node_api_1.full_node, blocks[6].prev_header_hash) @pytest.mark.anyio async def test_validate_blockchain_spend_reorg_since_genesis( diff --git a/chia/_tests/core/full_node/stores/test_full_node_store.py b/chia/_tests/core/full_node/stores/test_full_node_store.py index 7ead0a57a4da..dbb0a8a0eacb 100644 --- a/chia/_tests/core/full_node/stores/test_full_node_store.py +++ b/chia/_tests/core/full_node/stores/test_full_node_store.py @@ -10,6 +10,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block, _validate_and_add_block_no_error from chia._tests.util.blockchain import create_blockchain from chia._tests.util.blockchain_mock import BlockchainMock +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import AddBlockResult, Blockchain from chia.consensus.constants import ConsensusConstants from chia.consensus.default_constants import DEFAULT_CONSTANTS @@ -477,11 +478,14 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) + + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) for block in blocks_reorg: + peak = blockchain.get_peak() assert peak is not None - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) peak_here = blockchain.get_peak() assert peak_here is not None @@ -559,7 +563,7 @@ async def test_basic_store( normalized_to_identity_cc_ip=normalized_to_identity, normalized_to_identity_cc_sp=normalized_to_identity, ) - await _validate_and_add_block(blockchain, blocks[-1]) + await _validate_and_add_block(blockchain, blocks[-1], fork_info=fork_info) peak_here = blockchain.get_peak() assert peak_here is not None if peak_here.header_hash == blocks[-1].header_hash: @@ -911,8 +915,9 @@ async def test_basic_store( normalized_to_identity_icc_eos=normalized_to_identity, ) + fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE) for block in blocks[:5]: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) sb = blockchain.block_record(block.header_hash) result = await blockchain.get_sp_and_ip_sub_slots(block.header_hash) assert result is not None @@ -941,7 +946,7 @@ async def test_basic_store( ) store.add_to_future_ip(new_ip) - await _validate_and_add_block_no_error(blockchain, prev_block) + await _validate_and_add_block_no_error(blockchain, prev_block, fork_info=fork_info) result = await blockchain.get_sp_and_ip_sub_slots(prev_block.header_hash) assert result is not None sp_sub_slot, ip_sub_slot = result @@ -983,13 +988,13 @@ async def test_basic_store( # Then do a reorg up to B2, removing all signage points after B2, but not before log.warning(f"Adding blocks up to {blocks[-1]}") for block in blocks: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) log.warning("Starting loop") while True: log.warning("Looping") blocks = custom_block_tools.get_consecutive_blocks(1, block_list_input=blocks, skip_slots=1) - await _validate_and_add_block_no_error(blockchain, blocks[-1]) + await 
_validate_and_add_block_no_error(blockchain, blocks[-1], fork_info=fork_info) peak = blockchain.get_peak() assert peak is not None result = await blockchain.get_sp_and_ip_sub_slots(peak.header_hash) diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 0d4616c58632..0079a7d58e1b 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -425,8 +425,9 @@ async def check_transaction_confirmed(transaction) -> bool: diff = bt.constants.DIFFICULTY_STARTING reog_blocks = bt.get_consecutive_blocks(14) for r in range(0, len(reog_blocks), 3): + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for reorg_block in reog_blocks[:r]: - await _validate_and_add_block_no_error(blockchain, reorg_block) + await _validate_and_add_block_no_error(blockchain, reorg_block, fork_info=fork_info) for i in range(1, height): results = await pre_validate_blocks_multiprocessing( blockchain.constants, @@ -442,8 +443,9 @@ async def check_transaction_confirmed(transaction) -> bool: assert result.error is None for r in range(0, len(all_blocks), 3): + fork_info = ForkInfo(-1, -1, bt.constants.GENESIS_CHALLENGE) for block in all_blocks[:r]: - await _validate_and_add_block_no_error(blockchain, block) + await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) for i in range(1, height): results = await pre_validate_blocks_multiprocessing( blockchain.constants, @@ -984,9 +986,7 @@ async def test_new_transaction_and_mempool(self, wallet_nodes, self_hostname, se block_list_input=blocks[:-1], guarantee_transaction_block=True, ) - for block in blocks[-2:]: - await full_node_1.full_node.add_block(block, peer) - + await add_blocks_in_batches(blocks[-2:], full_node_1.full_node, blocks[-2].prev_header_hash) # Can now resubmit a transaction after the reorg status, err = await full_node_1.full_node.add_transaction( successful_bundle, successful_bundle.name(), peer, test=True @@ -2287,16 +2287,7 @@ async def test_long_reorg( # not in the cache. We need to explicitly prune the cache to get that # effect. 
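Several tests now share a single ForkInfo across a whole chain replayed from genesis: height -1 with the genesis challenge hash means "fork point before the first block", and the same object accumulates additions and removals as blocks are added. A sketch of that convention:

    from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block_no_error
    from chia.consensus.block_body_validation import ForkInfo
    from chia.consensus.blockchain import Blockchain
    from chia.types.full_block import FullBlock

    async def replay_from_genesis(blockchain: Blockchain, blocks: list[FullBlock]) -> None:
        fork_info = ForkInfo(-1, -1, blockchain.constants.GENESIS_CHALLENGE)
        for block in blocks:
            await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info)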
node.full_node.blockchain.clean_block_records() - - fork_info: Optional[ForkInfo] = None - for b in reorg_blocks: - if (b.height % 128) == 0: - peak = node.full_node.blockchain.get_peak() - print(f"reorg chain: {b.height:4} " f"weight: {b.weight:7} " f"peak: {str(peak.header_hash)[:6]}") - if b.height > fork_point and fork_info is None: - fork_info = ForkInfo(fork_point, fork_point, reorg_blocks[fork_point].header_hash) - await node.full_node.add_block(b, fork_info=fork_info) - + await add_blocks_in_batches(reorg_blocks, node.full_node) # if these asserts fires, there was no reorg peak = node.full_node.blockchain.get_peak() assert peak.header_hash != chain_1_peak @@ -2311,7 +2302,6 @@ async def test_long_reorg( assert peak.height > chain_1_height else: assert peak.height < chain_1_height - # now reorg back to the original chain # this exercises the case where we have some of the blocks in the DB already node.full_node.blockchain.clean_block_records() @@ -2321,15 +2311,7 @@ async def test_long_reorg( blocks = default_10000_blocks[fork_point - 100 : 3200] else: blocks = default_10000_blocks[fork_point - 100 : 5500] - - fork_block = blocks[0] - fork_info = ForkInfo(fork_block.height - 1, fork_block.height - 1, fork_block.prev_header_hash) - for b in blocks: - if (b.height % 128) == 0: - peak = node.full_node.blockchain.get_peak() - print(f"original chain: {b.height:4} " f"weight: {b.weight:7} " f"peak: {str(peak.header_hash)[:6]}") - await node.full_node.add_block(b, fork_info=fork_info) - + await add_blocks_in_batches(blocks, node.full_node) # if these asserts fires, there was no reorg back to the original chain peak = node.full_node.blockchain.get_peak() assert peak.header_hash != chain_2_peak diff --git a/chia/_tests/core/test_db_conversion.py b/chia/_tests/core/test_db_conversion.py index c79e46a4786f..53892494aa2c 100644 --- a/chia/_tests/core/test_db_conversion.py +++ b/chia/_tests/core/test_db_conversion.py @@ -7,6 +7,7 @@ from chia._tests.util.temp_file import TempFile from chia.cmds.db_upgrade_func import convert_v1_to_v2 +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import Blockchain from chia.consensus.multiprocess_validation import PreValidationResult from chia.full_node.block_store import BlockStore @@ -74,7 +75,10 @@ async def test_blocks(default_1000_blocks, with_hints: bool): sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters # await _validate_and_add_block(bc, block) results = PreValidationResult(None, uint64(1), None, False, uint32(0)) - result, err, _ = await bc.add_block(block, results, None, sub_slot_iters=sub_slot_iters) + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + result, err, _ = await bc.add_block( + block, results, None, sub_slot_iters=sub_slot_iters, fork_info=fork_info + ) assert err is None # now, convert v1 in_file to v2 out_file diff --git a/chia/_tests/core/test_db_validation.py b/chia/_tests/core/test_db_validation.py index 71090c550494..1d661e04e457 100644 --- a/chia/_tests/core/test_db_validation.py +++ b/chia/_tests/core/test_db_validation.py @@ -9,6 +9,7 @@ from chia._tests.util.temp_file import TempFile from chia.cmds.db_validate_func import validate_v2 +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import Blockchain from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.multiprocess_validation import PreValidationResult @@ -144,7 +145,10 @@ async def make_db(db_file: Path, 
blocks: list[FullBlock]) -> None: if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters results = PreValidationResult(None, uint64(1), None, False, uint32(0)) - result, err, _ = await bc.add_block(block, results, None, sub_slot_iters=sub_slot_iters) + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + result, err, _ = await bc.add_block( + block, results, None, sub_slot_iters=sub_slot_iters, fork_info=fork_info + ) assert err is None diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index 128a9f294594..79ba7ff51628 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -9,6 +9,7 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.conftest import ConsensusMode from chia._tests.connection_utils import connect_and_get_peer +from chia._tests.util.misc import add_blocks_in_batches from chia._tests.util.rpc import validate_get_routes from chia._tests.util.time_out_assert import time_out_assert from chia.consensus.block_record import BlockRecord @@ -535,8 +536,7 @@ async def test_signage_points(two_nodes_sim_and_wallets_services, empty_blockcha # Perform a reorg blocks = bt.get_consecutive_blocks(12, seed=b"1234") - for block in blocks: - await full_node_api_1.full_node.add_block(block) + await add_blocks_in_batches(blocks, full_node_api_1.full_node) # Signage point is no longer in the blockchain res = await client.get_recent_signage_point_or_eos(sp.cc_vdf.output.get_hash(), None) diff --git a/chia/_tests/environments/wallet.py b/chia/_tests/environments/wallet.py index 1c8eefcd713d..6fdaeced2445 100644 --- a/chia/_tests/environments/wallet.py +++ b/chia/_tests/environments/wallet.py @@ -298,11 +298,12 @@ async def process_pending_states( puzzle_hash_indexes.append(ph_indexes) pending_txs: list[list[TransactionRecord]] = [] - + peak = self.full_node.full_node.blockchain.get_peak_height() + assert peak is not None # Check balances prior to block try: for i, env in enumerate(self.environments): - await self.full_node.wait_for_wallet_synced(wallet_node=env.node, timeout=20) + await self.full_node.wait_for_wallet_synced(wallet_node=env.node, timeout=20, peak_height=peak) try: pending_txs.append( await env.wait_for_transactions_to_settle( @@ -322,8 +323,6 @@ async def process_pending_states( raise ValueError("Error before block was farmed") # Farm block - peak = self.full_node.full_node.blockchain.get_peak_height() - assert peak is not None await self.full_node.farm_blocks_to_puzzlehash(count=1, guarantee_transaction_blocks=True) # Check balances after block diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py index 3e18c6f21433..6b102f81f152 100644 --- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py +++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py @@ -13,6 +13,7 @@ from pytest_mock import MockerFixture from chia._tests.util.time_out_assert import time_out_assert +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.blockchain import AddBlockResult from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing @@ -451,7 +452,10 @@ async def 
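The wallet-facing tests get the same tightening: capture the peak height first, then wait for the wallet to reach that exact height rather than a generic synced state. A minimal sketch, assuming full_node_api is the FullNodeSimulator these tests use:

    async def wait_for_exact_peak(full_node_api, wallet_node) -> None:
        peak = full_node_api.full_node.blockchain.get_peak_height()
        assert peak is not None
        await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, timeout=20, peak_height=peak)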
add_test_blocks_into_full_node(blocks: list[FullBlock], full_node: Ful if block.height != 0 and len(block.finished_sub_slots) > 0: # pragma: no cover if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: ssi = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters - r, _, _ = await full_node.blockchain.add_block(blocks[i], pre_validation_results[i], None, sub_slot_iters=ssi) + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) + r, _, _ = await full_node.blockchain.add_block( + blocks[i], pre_validation_results[i], None, sub_slot_iters=ssi, fork_info=fork_info + ) assert r == AddBlockResult.NEW_PEAK diff --git a/chia/_tests/process_junit.py b/chia/_tests/process_junit.py index e348f374c99c..8442c88464eb 100644 --- a/chia/_tests/process_junit.py +++ b/chia/_tests/process_junit.py @@ -12,8 +12,8 @@ import click import lxml.etree -from chia._tests.util.misc import BenchmarkData, DataTypeProtocol, TestId -from chia._tests.util.time_out_assert import TimeOutAssertData +from chia._tests.util.misc import BenchmarkData, TestId +from chia._tests.util.time_out_assert import DataTypeProtocol, TimeOutAssertData supported_data_types: list[type[DataTypeProtocol]] = [TimeOutAssertData, BenchmarkData] supported_data_types_by_tag: dict[str, type[DataTypeProtocol]] = {cls.tag: cls for cls in supported_data_types} diff --git a/chia/_tests/util/full_sync.py b/chia/_tests/util/full_sync.py index e6d53a1ff236..2427b7749e2d 100644 --- a/chia/_tests/util/full_sync.py +++ b/chia/_tests/util/full_sync.py @@ -15,6 +15,7 @@ from chia._tests.util.constants import test_constants as TEST_CONSTANTS from chia.cmds.init_funcs import chia_init +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.constants import replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty @@ -208,8 +209,13 @@ async def run_sync_test( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) + fork_height = block_batch[0].height - 1 + header_hash = block_batch[0].prev_header_hash success, summary, err = await full_node.add_block_batch( - block_batch, peer_info, None, ValidationState(ssi, diff, None) + block_batch, + peer_info, + ForkInfo(fork_height, fork_height, header_hash), + ValidationState(ssi, diff, None), ) end_height = block_batch[-1].height full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE) diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index b257fc2a576b..c80a89e21db5 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -12,12 +12,10 @@ import ssl import subprocess import sys -from collections.abc import Awaitable, Collection, Iterable, Iterator +from collections.abc import Awaitable, Collection, Iterator from concurrent.futures import Future from dataclasses import dataclass, field from enum import Enum -from inspect import getframeinfo, stack -from pathlib import Path from statistics import mean from textwrap import dedent from time import thread_time @@ -35,9 +33,12 @@ import chia import chia._tests from chia._tests import ether +from chia._tests.connection_utils import add_dummy_connection from chia._tests.core.data_layer.util import ChiaRoot +from chia._tests.util.time_out_assert import DataTypeProtocol, caller_file_and_line +from chia.consensus.block_body_validation import ForkInfo from 
chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty -from chia.full_node.full_node import FullNode +from chia.full_node.full_node import FullNode, PeakPostProcessingResult from chia.full_node.mempool import Mempool from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.condition_opcodes import ConditionOpcode @@ -598,27 +599,6 @@ def marshal(self) -> dict[str, Any]: } -T = TypeVar("T") - - -@dataclasses.dataclass(frozen=True) -class DataTypeProtocol(Protocol): - tag: ClassVar[str] - - line: int - path: Path - label: str - duration: float - limit: float - - __match_args__: ClassVar[tuple[str, ...]] = () - - @classmethod - def unmarshal(cls: type[T], marshalled: dict[str, Any]) -> T: ... - - def marshal(self) -> dict[str, Any]: ... - - T_ComparableEnum = TypeVar("T_ComparableEnum", bound="ComparableEnum") @@ -660,20 +640,6 @@ def __ge__(self: T_ComparableEnum, other: T_ComparableEnum) -> object: return self.value.__ge__(other.value) -def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) -> tuple[str, int]: - caller = getframeinfo(stack()[distance + 1][0]) - - caller_path = Path(caller.filename) - options: list[str] = [caller_path.as_posix()] - for path in relative_to: - try: - options.append(caller_path.relative_to(path).as_posix()) - except ValueError: - pass - - return min(options, key=len), caller.lineno - - async def add_blocks_in_batches( blocks: list[FullBlock], full_node: FullNode, @@ -682,22 +648,39 @@ async def add_blocks_in_batches( if header_hash is None: diff = full_node.constants.DIFFICULTY_STARTING ssi = full_node.constants.SUB_SLOT_ITERS_STARTING + fork_height = -1 + fork_info = ForkInfo(-1, fork_height, full_node.constants.GENESIS_CHALLENGE) else: block_record = await full_node.blockchain.get_block_record_from_db(header_hash) + assert block_record is not None ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) + fork_height = block_record.height + fork_info = ForkInfo(block_record.height, fork_height, block_record.header_hash) + + _, dummy_node_id = await add_dummy_connection(full_node.server, "127.0.0.1", 12315) + dummy_peer = full_node.server.all_connections[dummy_node_id] vs = ValidationState(ssi, diff, None) + for block_batch in to_batches(blocks, 64): b = block_batch.entries[0] if (b.height % 128) == 0: print(f"main chain: {b.height:4} weight: {b.weight}") # vs is updated by the call to add_block_batch() - success, _, err = await full_node.add_block_batch( + success, state_change_summary, err = await full_node.add_block_batch( block_batch.entries, PeerInfo("0.0.0.0", 0), - None, + fork_info, vs, ) assert err is None assert success is True + if state_change_summary is not None: + peak_fb: Optional[FullBlock] = await full_node.blockchain.get_full_peak() + assert peak_fb is not None + ppp_result: PeakPostProcessingResult = await full_node.peak_post_processing( + peak_fb, state_change_summary, dummy_peer + ) + await full_node.peak_post_processing_2(peak_fb, dummy_peer, state_change_summary, ppp_result) + await full_node._finish_sync() diff --git a/chia/_tests/util/time_out_assert.py b/chia/_tests/util/time_out_assert.py index d2cf5a6945f2..a777c802675e 100644 --- a/chia/_tests/util/time_out_assert.py +++ b/chia/_tests/util/time_out_assert.py @@ -6,18 +6,41 @@ import logging import pathlib import time -from typing import TYPE_CHECKING, Any, Callable, ClassVar, cast, final +from collections.abc import Iterable +from inspect import 
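Usage sketch for the reworked helper above: the optional header_hash selects the fork point, so one call covers both extending from genesis and replaying a reorg chain (the argument names are assumed from the surrounding tests):

    from chia._tests.util.misc import add_blocks_in_batches

    async def feed_chains(full_node, blocks, reorg_blocks, fork_header_hash) -> None:
        # extend the current chain starting from genesis
        await add_blocks_in_batches(blocks, full_node)
        # replay an alternate chain that forks at the block with fork_header_hash
        await add_blocks_in_batches(reorg_blocks, full_node, fork_header_hash)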
getframeinfo, stack +from pathlib import Path +from typing import TYPE_CHECKING, Any, Callable, ClassVar, Protocol, TypeVar, cast, final import chia import chia._tests from chia._tests import ether -from chia._tests.util.misc import DataTypeProtocol, caller_file_and_line from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.util.timing import adjusted_timeout log = logging.getLogger(__name__) +T = TypeVar("T") + + +@dataclasses.dataclass(frozen=True) +class DataTypeProtocol(Protocol): + tag: ClassVar[str] + + line: int + path: Path + label: str + duration: float + limit: float + + __match_args__: ClassVar[tuple[str, ...]] = () + + @classmethod + def unmarshal(cls: type[T], marshalled: dict[str, Any]) -> T: ... + + def marshal(self) -> dict[str, Any]: ... + + @final @dataclasses.dataclass(frozen=True) class TimeOutAssertData: @@ -152,3 +175,17 @@ async def bool_f(): return True return bool_f + + +def caller_file_and_line(distance: int = 1, relative_to: Iterable[Path] = ()) -> tuple[str, int]: + caller = getframeinfo(stack()[distance + 1][0]) + + caller_path = Path(caller.filename) + options: list[str] = [caller_path.as_posix()] + for path in relative_to: + try: + options.append(caller_path.relative_to(path).as_posix()) + except ValueError: + pass + + return min(options, key=len), caller.lineno diff --git a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py index 7989fe6ac687..c2cb9bdcbd73 100644 --- a/chia/_tests/wallet/cat_wallet/test_cat_wallet.py +++ b/chia/_tests/wallet/cat_wallet/test_cat_wallet.py @@ -54,7 +54,7 @@ async def test_cat_creation(wallet_environments: WalletTestFramework) -> None: full_node_api = wallet_environments.full_node wsm = wallet_environments.environments[0].wallet_state_manager wallet = wallet_environments.environments[0].xch_wallet - + wallet_node = wallet_environments.environments[0].node wallet_environments.environments[0].wallet_aliases = { "xch": 1, "cat": 2, @@ -138,7 +138,7 @@ async def test_cat_creation(wallet_environments: WalletTestFramework) -> None: await full_node_api.reorg_from_index_to_new_index( ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32(32 * b"1"), None) ) - + await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, peak_height=uint32(height + 1)) # The "set_remainder" sections here are due to a peculiarity with how the creation method creates an incoming TX # The creation method is for testing purposes only so we're not going to bother fixing it for any real reason await wallet_environments.process_pending_states( @@ -459,7 +459,7 @@ async def test_cat_spend(wallet_environments: WalletTestFramework) -> None: await full_node_api.reorg_from_index_to_new_index( ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32(32 * b"1"), None) ) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node) + await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node, peak_height=uint32(height + 1)) await env_1.change_balances( { "cat": { diff --git a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py index f5a5dcd841db..24291a7b7382 100644 --- a/chia/_tests/wallet/nft_wallet/test_nft_wallet.py +++ b/chia/_tests/wallet/nft_wallet/test_nft_wallet.py @@ -301,7 +301,8 @@ async def test_nft_wallet_creation_and_transfer(wallet_environments: WalletTestF ReorgProtocol(uint32(height - 1), uint32(height + 1), bytes32.zeros, None) ) await time_out_assert(60, 
full_node_api.full_node.blockchain.get_peak_height, height + 1) - await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0) + await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_0, peak_height=uint32(height + 1), timeout=10) + await full_node_api.wait_for_wallet_synced(wallet_node=wallet_node_1, peak_height=uint32(height + 1), timeout=10) await env_0.change_balances( { "xch": { diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 7fc03d64eedf..26e096981ae2 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -19,6 +19,7 @@ from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_not_none from chia._tests.weight_proof.test_weight_proof import load_blocks_dont_validate +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.block_record import BlockRecord from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.consensus.constants import ConsensusConstants @@ -360,10 +361,11 @@ async def test_long_sync_wallet( sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) + fork_height = blocks_reorg[-num_blocks - 10].height - 1 await full_node.add_block_batch( blocks_reorg[-num_blocks - 10 : -1], PeerInfo("0.0.0.0", 0), - None, + ForkInfo(fork_height, fork_height, blocks_reorg[-num_blocks - 10].prev_header_hash), ValidationState(sub_slot_iters, difficulty, None), ) await full_node.add_block(blocks_reorg[-1]) @@ -402,8 +404,7 @@ async def test_wallet_reorg_sync( await wallet_server.start_client(PeerInfo(self_hostname, full_node_server.get_port()), None) # Insert 400 blocks - await full_node.add_block(default_400_blocks[0]) - await add_blocks_in_batches(default_400_blocks[1:], full_node) + await add_blocks_in_batches(default_400_blocks, full_node) # Farm few more with reward for _ in range(num_blocks - 1): await full_node_api.farm_new_transaction_block(FarmNewBlockProtocol(phs[0])) @@ -425,8 +426,7 @@ async def test_wallet_reorg_sync( num_blocks = 30 blocks_reorg = bt.get_consecutive_blocks(num_blocks, block_list_input=default_400_blocks[:-5]) - for block in blocks_reorg[-30:]: - await full_node.add_block(block) + await add_blocks_in_batches(blocks_reorg[-30:], full_node, blocks_reorg[-30].prev_header_hash) for wallet_node, wallet_server in wallets: wallet = wallet_node.wallet_state_manager.main_wallet @@ -482,7 +482,7 @@ async def test_wallet_reorg_get_coinbase( await full_node.add_block_batch( blocks_reorg_2[-44:], PeerInfo("0.0.0.0", 0), - None, + ForkInfo(blocks_reorg_2[-45].height, blocks_reorg_2[-45].height, blocks_reorg_2[-45].header_hash), ValidationState(sub_slot_iters, difficulty, None), ) diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 4152b191d789..76905c0f6ac1 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -4,7 +4,6 @@ import dataclasses import enum import logging -import time import traceback from concurrent.futures import Executor, ThreadPoolExecutor from enum import Enum @@ -283,7 +282,7 @@ async def add_block( pre_validation_result: PreValidationResult, bls_cache: Optional[BLSCache], sub_slot_iters: uint64, - fork_info: Optional[ForkInfo] = None, + fork_info: ForkInfo, prev_ses_block: Optional[BlockRecord] = None, ) -> tuple[AddBlockResult, Optional[Err], 
Optional[StateChangeSummary]]: """ @@ -338,78 +337,21 @@ async def add_block( header_hash: bytes32 = block.header_hash - # maybe fork_info should be mandatory to pass in, but we have a lot of - # tests that make sure the Blockchain object can handle any blocks, - # including orphaned ones, without any fork context - if fork_info is None: - block_rec = await self.get_block_record_from_db(header_hash) - if block_rec is not None: - self.add_block_record(block_rec) - # this means we have already seen and validated this block. - return AddBlockResult.ALREADY_HAVE_BLOCK, None, None - elif extending_main_chain: - # this is the common and efficient case where we extend the main - # chain. The fork_info can be empty - prev_height = block.height - 1 - fork_info = ForkInfo(prev_height, prev_height, block.prev_header_hash) - else: - assert peak is not None - # the block is extending a fork, and we don't have any fork_info - # for it. This can potentially be quite expensive and we should - # try to avoid getting here - - # first, collect all the block hashes of the forked chain - # the block we're trying to add doesn't exist in the chain yet, - # so we need to start traversing from its prev_header_hash - fork_chain, fork_hash = await lookup_fork_chain( - self, - (peak.height, peak.header_hash), - (block.height - 1, block.prev_header_hash), - self.constants, - ) - # now we know how long the fork is, and can compute the fork - # height. - fork_height = block.height - len(fork_chain) - 1 - fork_info = ForkInfo(fork_height, fork_height, fork_hash) - - log.warning( - f"slow path in block validation. Building coin set for fork ({fork_height}, {block.height})" - ) - - # now run all the blocks of the fork to compute the additions - # and removals. They are recorded in the fork_info object - counter = 0 - start = time.monotonic() - for height in range(fork_info.fork_height + 1, block.height): - fork_block: Optional[FullBlock] = await self.block_store.get_full_block(fork_chain[uint32(height)]) - assert fork_block is not None - assert fork_block.height - 1 == fork_info.peak_height - assert fork_block.height == 0 or fork_block.prev_header_hash == fork_info.peak_hash - await self.run_single_block(fork_block, fork_info) - counter += 1 - end = time.monotonic() - log.info( - f"executed {counter} block generators in {end - start:2f} s. " - f"{len(fork_info.additions_since_fork)} additions, " - f"{len(fork_info.removals_since_fork)} removals" - ) - - else: - if extending_main_chain: - fork_info.reset(block.height - 1, block.prev_header_hash) - - block_rec = await self.get_block_record_from_db(header_hash) - if block_rec is not None: - # We have already validated the block, but if it's not part of the - # main chain, we still need to re-run it to update the additions and - # removals in fork_info. - await self.advance_fork_info(block, fork_info) - fork_info.include_spends(pre_validation_result.conds, block, header_hash) - self.add_block_record(block_rec) - return AddBlockResult.ALREADY_HAVE_BLOCK, None, None - - if fork_info.peak_hash != block.prev_header_hash: - await self.advance_fork_info(block, fork_info) + if extending_main_chain: + fork_info.reset(block.height - 1, block.prev_header_hash) + + block_rec = await self.get_block_record_from_db(header_hash) + if block_rec is not None: + # We have already validated the block, but if it's not part of the + # main chain, we still need to re-run it to update the additions and + # removals in fork_info. 
+ await self.advance_fork_info(block, fork_info) + fork_info.include_spends(pre_validation_result.conds, block, header_hash) + self.add_block_record(block_rec) + return AddBlockResult.ALREADY_HAVE_BLOCK, None, None + + if fork_info.peak_hash != block.prev_header_hash: + await self.advance_fork_info(block, fork_info) # if these prerequisites of the fork_info aren't met, the fork_info # object is invalid for this block. If the caller would have passed in diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index b47715b79393..0de2ae677de0 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -580,6 +580,12 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t try: peer_info = peer.get_peer_logging() + if start_height > 0: + fork_hash = self.blockchain.height_to_hash(uint32(start_height - 1)) + else: + fork_hash = self.constants.GENESIS_CHALLENGE + assert fork_hash + fork_info = ForkInfo(start_height - 1, start_height - 1, fork_hash) for height in range(start_height, target_height, batch_size): end_height = min(target_height, height + batch_size) request = RequestBlocks(uint32(height), uint32(end_height), True) @@ -598,7 +604,7 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t ) vs = ValidationState(ssi, diff, None) success, state_change_summary, err = await self.add_block_batch( - response.blocks, peer_info, None, vs + response.blocks, peer_info, fork_info, vs ) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") @@ -1148,21 +1154,12 @@ async def validate_block_batches( # for deep reorgs peak: Optional[BlockRecord] if fork_info is None: - peak = self.blockchain.get_peak() - extending_main_chain: bool = peak is None or ( - peak.header_hash == blocks[0].prev_header_hash or peak.header_hash == blocks[0].header_hash - ) - # if we're simply extending the main chain, it's important - # *not* to pass in a ForkInfo object, as it can potentially - # accrue a large state (with no value, since we can validate - # against the CoinStore) - if not extending_main_chain: - if fork_point_height == 0: - fork_info = ForkInfo(-1, -1, self.constants.GENESIS_CHALLENGE) - else: - fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) - assert fork_hash is not None - fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) + if fork_point_height > 0: + fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) + assert fork_hash is not None + else: + fork_hash = self.constants.GENESIS_CHALLENGE + fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) # The ValidationState object (vs) is an in-out parameter. the add_block_batch() # call will update it @@ -1270,7 +1267,7 @@ async def add_block_batch( self, all_blocks: list[FullBlock], peer_info: PeerInfo, - fork_info: Optional[ForkInfo], + fork_info: ForkInfo, vs: ValidationState, # in-out parameter wp_summaries: Optional[list[SubEpochSummary]] = None, ) -> tuple[bool, Optional[StateChangeSummary], Optional[Err]]: @@ -1294,16 +1291,10 @@ async def add_block_batch( if block_rec.sub_epoch_summary_included.new_difficulty is not None: vs.current_difficulty = block_rec.sub_epoch_summary_included.new_difficulty - if fork_info is None: - continue # the below section updates the fork_info object, if # there is one. 
- - # TODO: it seems unnecessary to request overlapping block ranges - # when syncing if block.height <= fork_info.peak_height: continue - # we have already validated this block once, no need to do it again. # however, if this block is not part of the main chain, we need to # update the fork context with its additions and removals @@ -1751,7 +1742,6 @@ async def add_block( peer: Optional[WSChiaConnection] = None, bls_cache: Optional[BLSCache] = None, raise_on_disconnected: bool = False, - fork_info: Optional[ForkInfo] = None, ) -> Optional[Message]: """ Add a full block from a peer full node (or ourselves). @@ -1883,6 +1873,7 @@ async def add_block( pre_validation_results[0] if pre_validation_result is None else pre_validation_result ) assert result_to_validate.required_iters == pre_validation_results[0].required_iters + fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) (added, error_code, state_change_summary) = await self.blockchain.add_block( block, result_to_validate, bls_cache, ssi, fork_info ) diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index b791b8ecc611..d8a100c5b0e6 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -8,6 +8,8 @@ import anyio +from chia._tests.util.misc import add_blocks_in_batches +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.block_record import BlockRecord from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward from chia.consensus.blockchain import BlockchainMutexPriority @@ -182,11 +184,13 @@ async def farm_new_transaction_block( validate_signatures=True, ) assert pre_validation_results is not None + fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( genesis, pre_validation_results[0], self.full_node._bls_cache, self.full_node.constants.SUB_SLOT_ITERS_STARTING, + fork_info, ) peak = self.full_node.blockchain.get_peak() @@ -243,11 +247,9 @@ async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_tim validate_signatures=True, ) assert pre_validation_results is not None + fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( - genesis, - pre_validation_results[0], - self.full_node._bls_cache, - ssi, + genesis, pre_validation_results[0], self.full_node._bls_cache, ssi, fork_info ) peak = self.full_node.blockchain.get_peak() assert peak is not None @@ -302,9 +304,7 @@ async def reorg_from_index_to_new_index(self, request: ReorgProtocol): guarantee_transaction_block=True, seed=seed, ) - - for block in more_blocks: - await self.full_node.add_block(block) + await add_blocks_in_batches(more_blocks, self.full_node, current_blocks[old_index].header_hash) async def farm_blocks_to_puzzlehash( self, diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py index 20752d8055ed..4701ba8d3ab1 100755 --- a/tools/test_full_sync.py +++ b/tools/test_full_sync.py @@ -13,6 +13,7 @@ from chia._tests.util.full_sync import FakePeer, FakeServer, run_sync_test from chia.cmds.init_funcs import chia_init +from chia.consensus.block_body_validation import ForkInfo from chia.consensus.constants import replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty @@ -159,8 +160,15 @@ async def run_sync_checkpoint( ssi, diff = 
get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) + + fork_height = block_batch[0].height - 1 + header_hash = block_batch[0].prev_header_hash + success, _, err = await full_node.add_block_batch( - block_batch, peer_info, None, ValidationState(ssi, diff, None) + block_batch, + peer_info, + ForkInfo(fork_height, fork_height, header_hash), + ValidationState(ssi, diff, None), ) end_height = block_batch[-1].height full_node.blockchain.clean_block_record(end_height - full_node.constants.BLOCKS_CACHE_SIZE) @@ -177,8 +185,13 @@ async def run_sync_checkpoint( ssi, diff = get_next_sub_slot_iters_and_difficulty( full_node.constants, True, block_record, full_node.blockchain ) + fork_height = block_batch[0].height - 1 + fork_header_hash = block_batch[0].prev_header_hash success, _, err = await full_node.add_block_batch( - block_batch, peer_info, None, ValidationState(ssi, diff, None) + block_batch, + peer_info, + ForkInfo(fork_height, fork_height, fork_header_hash), + ValidationState(ssi, diff, None), ) if not success: raise RuntimeError("failed to ingest block batch") From 45b08d60e3045ad1d7223bcb35dddbb0e25c9fab Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 22 Oct 2024 21:15:07 +0200 Subject: [PATCH 54/69] Additions removals (#18741) * use additions_and_removals() in the request_block_header() full node API. This is slightly faster, and untangles it from the function used for consensus * use additions_and_removals() instead of get_name_puzzle_conditions() in run_one_block(). We've already validated the block, we just need to recompute the additions and removals --- chia/consensus/block_body_validation.py | 30 +++++++++++++++++++++ chia/consensus/blockchain.py | 21 +++++++-------- chia/full_node/full_node_api.py | 35 +++++++++++++++---------- 3 files changed, 61 insertions(+), 25 deletions(-) diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index ba9d89d3c68c..017f916362c0 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -111,6 +111,36 @@ def include_spends(self, conds: Optional[SpendBundleConditions], block: FullBloc assert coin.name() not in self.additions_since_fork self.additions_since_fork[coin.name()] = ForkAdd(coin, block.height, timestamp, None, True) + def include_block( + self, + additions: list[tuple[Coin, Optional[bytes]]], + removals: list[Coin], + block: FullBlock, + header_hash: bytes32, + ) -> None: + height = block.height + + assert self.peak_height == height - 1 + + assert len(self.block_hashes) == self.peak_height - self.fork_height + assert block.height == self.fork_height + 1 + len(self.block_hashes) + self.block_hashes.append(header_hash) + + self.peak_height = int(block.height) + self.peak_hash = header_hash + + if block.foliage_transaction_block is not None: + timestamp = block.foliage_transaction_block.timestamp + for spend in removals: + self.removals_since_fork[bytes32(spend.name())] = ForkRem(bytes32(spend.puzzle_hash), height) + for coin, hint in additions: + self.additions_since_fork[coin.name()] = ForkAdd(coin, height, timestamp, hint, False) + for coin in block.get_included_reward_coins(): + assert block.foliage_transaction_block is not None + timestamp = block.foliage_transaction_block.timestamp + assert coin.name() not in self.additions_since_fork + self.additions_since_fork[coin.name()] = ForkAdd(coin, block.height, timestamp, None, True) + def rollback(self, header_hash: bytes32, height: int) -> None: 
assert height <= self.peak_height self.peak_height = height diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 76905c0f6ac1..9c3f31006b2d 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -10,7 +10,7 @@ from pathlib import Path from typing import TYPE_CHECKING, ClassVar, Optional, cast -from chia_rs import BLSCache +from chia_rs import BLSCache, additions_and_removals, get_flags_for_height_and_constants from chia.consensus.block_body_validation import ForkInfo, validate_block_body from chia.consensus.block_header_validation import validate_unfinished_header_block @@ -25,7 +25,6 @@ from chia.full_node.block_height_map import BlockHeightMap from chia.full_node.block_store import BlockStore from chia.full_node.coin_store import CoinStore -from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary @@ -259,22 +258,22 @@ async def run_single_block(self, block: FullBlock, fork_info: ForkInfo) -> None: assert fork_info.peak_height == block.height - 1 assert block.height == 0 or fork_info.peak_hash == block.prev_header_hash - npc: Optional[NPCResult] = None + additions: list[tuple[Coin, Optional[bytes]]] = [] + removals: list[Coin] = [] if block.transactions_generator is not None: block_generator: Optional[BlockGenerator] = await get_block_generator(self.lookup_block_generators, block) assert block_generator is not None assert block.transactions_info is not None assert block.foliage_transaction_block is not None - npc = get_name_puzzle_conditions( - block_generator, - block.transactions_info.cost, - mempool_mode=False, - height=block.height, - constants=self.constants, + flags = get_flags_for_height_and_constants(block.height, self.constants) + additions, removals = additions_and_removals( + bytes(block.transactions_generator), + block_generator.generator_refs, + flags, + self.constants, ) - assert npc.error is None - fork_info.include_spends(None if npc is None else npc.conds, block, block.header_hash) + fork_info.include_block(additions, removals, block, block.header_hash) async def add_block( self, diff --git a/chia/full_node/full_node_api.py b/chia/full_node/full_node_api.py index 50cef57c2fff..55f5b2301f99 100644 --- a/chia/full_node/full_node_api.py +++ b/chia/full_node/full_node_api.py @@ -1,7 +1,6 @@ from __future__ import annotations import asyncio -import functools import logging import time import traceback @@ -10,7 +9,14 @@ from typing import TYPE_CHECKING, Optional, cast import anyio -from chia_rs import AugSchemeMPL, G1Element, G2Element, MerkleSet +from chia_rs import ( + AugSchemeMPL, + G1Element, + G2Element, + MerkleSet, + additions_and_removals, + get_flags_for_height_and_constants, +) from chiabip158 import PyBIP158 from chia.consensus.block_creation import create_unfinished_block @@ -22,7 +28,7 @@ from chia.full_node.coin_store import CoinStore from chia.full_node.fee_estimate import FeeEstimate, FeeEstimateGroup, fee_rate_v2_to_v1 from chia.full_node.fee_estimator_interface import FeeEstimatorInterface -from chia.full_node.mempool_check_conditions import get_name_puzzle_conditions, get_puzzle_and_solution_for_coin +from chia.full_node.mempool_check_conditions import get_puzzle_and_solution_for_coin from chia.full_node.signage_point import SignagePoint from chia.full_node.tx_processing_queue import 
TransactionQueueFull from chia.protocols import farmer_protocol, full_node_protocol, introducer_protocol, timelord_protocol, wallet_protocol @@ -62,7 +68,7 @@ from chia.util.batches import to_batches from chia.util.db_wrapper import SQLITE_MAX_VARIABLE_NUMBER from chia.util.full_block_utils import header_block_from_block -from chia.util.generator_tools import get_block_header, tx_removals_and_additions +from chia.util.generator_tools import get_block_header from chia.util.hash import std_hash from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.limited_semaphore import LimitedSemaphoreFullError @@ -1195,19 +1201,20 @@ async def request_block_header(self, request: wallet_protocol.RequestBlockHeader # transactions_generator, so the block_generator should always be set assert block_generator is not None, "failed to get block_generator for tx-block" - npc_result = await asyncio.get_running_loop().run_in_executor( + flags = get_flags_for_height_and_constants(request.height, self.full_node.constants) + additions, removals = await asyncio.get_running_loop().run_in_executor( self.executor, - functools.partial( - get_name_puzzle_conditions, - block_generator, - self.full_node.constants.MAX_BLOCK_COST_CLVM, - mempool_mode=False, - height=request.height, - constants=self.full_node.constants, - ), + additions_and_removals, + bytes(block.transactions_generator), + block_generator.generator_refs, + flags, + self.full_node.constants, ) + # strip the hint from additions, and compute the puzzle hash for + # removals + tx_additions = [add[0] for add in additions] + tx_removals = [rem.name() for rem in removals] - tx_removals, tx_additions = tx_removals_and_additions(npc_result.conds) header_block = get_block_header(block, tx_additions, tx_removals) msg = make_msg( ProtocolMessageTypes.respond_block_header, From 4f00f639fc86ca01e75de6b50a9107236c184ac3 Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Tue, 22 Oct 2024 20:15:26 +0100 Subject: [PATCH 55/69] CHIA-1565 Pass a ValidationState to validate_finished_header_block (#18712) Pass a ValidationState to validate_finished_header_block. 
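
As a minimal sketch of the new call shape (illustrative only, not part of
the diff below: `constants`, `blocks`, and `header_block` are assumed to
come from the caller's context, as in the updated tests):

    from chia.consensus.block_header_validation import validate_finished_header_block
    from chia.types.validation_state import ValidationState

    # difficulty, sub-slot iters, and the previous SES block now travel
    # together in one ValidationState rather than as separate arguments
    vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None)
    required_iters, error = validate_finished_header_block(
        constants, blocks, header_block, False, vs
    )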
--- chia/_tests/blockchain/test_blockchain.py | 76 +++++++---------------- chia/consensus/block_header_validation.py | 17 +++-- chia/consensus/multiprocess_validation.py | 4 +- chia/full_node/weight_proof.py | 4 +- chia/wallet/wallet_blockchain.py | 6 +- 5 files changed, 37 insertions(+), 70 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index ae86f87b1399..55bbbe453a6d 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -172,13 +172,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # TODO: Inspect these block values as they are currently None expected_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) assert error is not None assert error.code == Err.INVALID_NEW_SUB_SLOT_ITERS @@ -199,13 +195,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # TODO: Inspect these block values as they are currently None expected_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad_2, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad_2, False, vs ) assert error is not None assert error.code == Err.INVALID_NEW_DIFFICULTY @@ -233,13 +225,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # TODO: Inspect these block values as they are currently None expected_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad_3, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad_3, False, vs ) assert error is not None assert error.code == Err.INVALID_SUB_EPOCH_SUMMARY @@ -266,13 +254,9 @@ async def test_long_chain(self, empty_blockchain: Blockchain, default_1000_block # TODO: Inspect these block values as they are currently None expected_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad_4, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad_4, False, vs ) assert error is not None assert 
error.code == Err.INVALID_SUB_EPOCH_SUMMARY @@ -523,13 +507,11 @@ async def test_invalid_sub_slot_challenge_hash_genesis(self, empty_blockchain: B ) header_block_bad = get_block_header(block_0_bad, [], []) + vs = ValidationState( + empty_blockchain.constants.SUB_SLOT_ITERS_STARTING, empty_blockchain.constants.DIFFICULTY_STARTING, None + ) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - empty_blockchain.constants.DIFFICULTY_STARTING, - empty_blockchain.constants.SUB_SLOT_ITERS_STARTING, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) assert error is not None @@ -557,13 +539,9 @@ async def test_invalid_sub_slot_challenge_hash_non_genesis( # TODO: Inspect these block values as they are currently None expected_difficulty = blocks[1].finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) assert error is not None assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH @@ -588,13 +566,9 @@ async def test_invalid_sub_slot_challenge_hash_empty_ss(self, empty_blockchain: # TODO: Inspect these block values as they are currently None expected_difficulty = blocks[1].finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = blocks[1].finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) assert error is not None assert error.code == Err.INVALID_PREV_CHALLENGE_SLOT_HASH @@ -747,13 +721,9 @@ async def test_invalid_icc_into_cc(self, empty_blockchain: Blockchain, bt: Block # TODO: Inspect these block values as they are currently None expected_difficulty = block.finished_sub_slots[0].challenge_chain.new_difficulty or uint64(0) expected_sub_slot_iters = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters or uint64(0) + vs = ValidationState(expected_sub_slot_iters, expected_difficulty, None) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - expected_difficulty, - expected_sub_slot_iters, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) assert error is not None assert error.code == Err.INVALID_ICC_HASH_CC @@ -819,13 +789,11 @@ async def test_empty_slot_no_ses(self, empty_blockchain: Blockchain, bt: BlockTo ) header_block_bad = get_block_header(block_bad, [], []) + vs = ValidationState( + empty_blockchain.constants.SUB_SLOT_ITERS_STARTING, empty_blockchain.constants.DIFFICULTY_STARTING, None + ) _, error = validate_finished_header_block( - empty_blockchain.constants, - empty_blockchain, - header_block_bad, - False, - empty_blockchain.constants.DIFFICULTY_STARTING, - empty_blockchain.constants.SUB_SLOT_ITERS_STARTING, + empty_blockchain.constants, empty_blockchain, header_block_bad, False, vs ) 
assert error is not None assert error.code == Err.INVALID_SUB_EPOCH_SUMMARY_HASH diff --git a/chia/consensus/block_header_validation.py b/chia/consensus/block_header_validation.py index e2e5f3c19b67..4f1bd396463e 100644 --- a/chia/consensus/block_header_validation.py +++ b/chia/consensus/block_header_validation.py @@ -29,6 +29,7 @@ from chia.types.end_of_slot_bundle import EndOfSubSlotBundle from chia.types.header_block import HeaderBlock from chia.types.unfinished_header_block import UnfinishedHeaderBlock +from chia.types.validation_state import ValidationState from chia.util.errors import Err, ValidationError from chia.util.hash import std_hash from chia.util.ints import uint8, uint32, uint64, uint128 @@ -834,10 +835,8 @@ def validate_finished_header_block( blocks: BlockRecordsProtocol, header_block: HeaderBlock, check_filter: bool, - expected_difficulty: uint64, - expected_sub_slot_iters: uint64, + vs: ValidationState, check_sub_epoch_summary: bool = True, - prev_ses_block: Optional[BlockRecord] = None, ) -> tuple[Optional[uint64], Optional[ValidationError]]: """ Fully validates the header of a block. A header block is the same as a full block, but @@ -858,11 +857,11 @@ def validate_finished_header_block( blocks, unfinished_header_block, check_filter, - expected_difficulty, - expected_sub_slot_iters, + vs.current_difficulty, + vs.current_ssi, False, check_sub_epoch_summary=check_sub_epoch_summary, - prev_ses_block=prev_ses_block, + prev_ses_block=vs.prev_ses_block, ) genesis_block = False @@ -880,7 +879,7 @@ def validate_finished_header_block( ip_iters: uint64 = calculate_ip_iters( constants, - expected_sub_slot_iters, + vs.current_ssi, header_block.reward_chain_block.signage_point_index, required_iters, ) @@ -891,8 +890,8 @@ def validate_finished_header_block( return None, ValidationError(Err.INVALID_HEIGHT) # 28. Check weight - if header_block.weight != prev_b.weight + expected_difficulty: - log.error(f"INVALID WEIGHT: {header_block} {prev_b} {expected_difficulty}") + if header_block.weight != prev_b.weight + vs.current_difficulty: + log.error(f"INVALID WEIGHT: {header_block} {prev_b} {vs.current_difficulty}") return None, ValidationError(Err.INVALID_WEIGHT) else: # 27b. 
Check genesis block height, weight, and prev block hash diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index a4a1e542bc8b..2450135afb71 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -94,9 +94,7 @@ def pre_validate_block( blockchain, header_block, True, # check_filter - vs.current_difficulty, - vs.current_ssi, - prev_ses_block=vs.prev_ses_block, + vs, ) error_int: Optional[uint16] = None if error is not None: diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index f94309712731..f5f2c29dd713 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@ -31,6 +31,7 @@ from chia.types.blockchain_format.vdf import VDFInfo, VDFProof, validate_vdf from chia.types.end_of_slot_bundle import EndOfSubSlotBundle from chia.types.header_block import HeaderBlock +from chia.types.validation_state import ValidationState from chia.types.weight_proof import ( RecentChainData, SubEpochChallengeSegment, @@ -1253,8 +1254,9 @@ def validate_recent_blocks( adjusted = True deficit = get_deficit(constants, deficit, prev_block_record, overflow, len(block.finished_sub_slots)) if sub_slots > 2 and transaction_blocks > 11 and (tip_height - block.height < last_blocks_to_validate): + vs = ValidationState(ssi, diff, None) caluclated_required_iters, error = validate_finished_header_block( - constants, sub_blocks, block, False, diff, ssi, ses_blocks > 2 + constants, sub_blocks, block, False, vs, ses_blocks > 2 ) if error is not None: log.error(f"block {block.header_hash} failed validation {error}") diff --git a/chia/wallet/wallet_blockchain.py b/chia/wallet/wallet_blockchain.py index 24e7036c8596..11bea1374760 100644 --- a/chia/wallet/wallet_blockchain.py +++ b/chia/wallet/wallet_blockchain.py @@ -11,6 +11,7 @@ from chia.consensus.full_block_to_block_record import block_to_block_record from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.header_block import HeaderBlock +from chia.types.validation_state import ValidationState from chia.types.weight_proof import WeightProof from chia.util.errors import Err from chia.util.ints import uint32, uint64 @@ -110,9 +111,8 @@ async def add_block(self, block: HeaderBlock) -> tuple[AddBlockResult, Optional[ difficulty = self._difficulty # Validation requires a block cache (self) that goes back to a subepoch barrier - required_iters, error = validate_finished_header_block( - self.constants, self, block, False, difficulty, sub_slot_iters, False - ) + vs = ValidationState(sub_slot_iters, difficulty, None) + required_iters, error = validate_finished_header_block(self.constants, self, block, False, vs, False) if error is not None: return AddBlockResult.INVALID_BLOCK, error.code if required_iters is None: From 6bbcd2d0650b66ea5f59a81d8b69fb7a3946b742 Mon Sep 17 00:00:00 2001 From: Arvid Norberg Date: Tue, 22 Oct 2024 21:16:12 +0200 Subject: [PATCH 56/69] tests as its own subproject (#18698) extend virtual_project_analysis to treat all files under chia/_tests as its own subproject. 
Since the tests depend on the production code, this will prevent production code from depending on tests
---
 chia/util/virtual_project_analysis.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py
index 756e4b1b25e9..6422dbe74f16 100644
--- a/chia/util/virtual_project_analysis.py
+++ b/chia/util/virtual_project_analysis.py
@@ -49,6 +49,12 @@ class ChiaFile:
 
     @classmethod
     def parse(cls, file_path: Path) -> ChiaFile:
+        # everything under chia/_tests belongs to the "tests" subproject. It
+        # (obviously) depends on everything, but no production code is allowed
+        # to depend back on the tests.
+        if list(file_path.parts[0:2]) == ["chia", "_tests"]:
+            return cls(file_path, Annotation("tests", True))
+
         with open(file_path, encoding="utf-8", errors="ignore") as f:
             file_string = f.read().strip()
             return cls(file_path, Annotation.parse(file_string))

From 8c72d22edf0257e53ece9af54801ccc348472933 Mon Sep 17 00:00:00 2001
From: Arvid Norberg
Date: Wed, 23 Oct 2024 15:50:08 +0200
Subject: [PATCH 57/69] move add_blocks_in_batches() out of tests (#18750)

move add_blocks_in_batches() out of tests, since the node simulator depends on it
---
 .../test_blockchain_transactions.py | 2 +-
 chia/_tests/core/full_node/test_full_node.py | 3 +-
 .../core/mempool/test_mempool_performance.py | 3 +-
 chia/_tests/core/test_full_node_rpc.py | 2 +-
 chia/_tests/util/misc.py | 54 ------------------
 chia/_tests/wallet/sync/test_wallet_sync.py | 3 +-
 chia/_tests/wallet/test_wallet_blockchain.py | 2 +-
 chia/_tests/wallet/test_wallet_node.py | 3 +-
 chia/simulator/add_blocks_in_batches.py | 56 +++++++++++++++++++
 chia/simulator/full_node_simulator.py | 2 +-
 10 files changed, 68 insertions(+), 62 deletions(-)
 create mode 100644 chia/simulator/add_blocks_in_batches.py

diff --git a/chia/_tests/blockchain/test_blockchain_transactions.py b/chia/_tests/blockchain/test_blockchain_transactions.py
index dfccb27f4aa4..8ac88bc03571 100644
--- a/chia/_tests/blockchain/test_blockchain_transactions.py
+++ b/chia/_tests/blockchain/test_blockchain_transactions.py
@@ -7,10 +7,10 @@
 
 from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block
 from chia._tests.util.generator_tools_testing import run_and_get_removals_and_additions
-from chia._tests.util.misc import add_blocks_in_batches
 from chia.full_node.full_node_api import FullNodeAPI
 from chia.protocols import wallet_protocol
 from chia.server.server import ChiaServer
+from chia.simulator.add_blocks_in_batches import add_blocks_in_batches
 from chia.simulator.block_tools import BlockTools, test_constants
 from chia.simulator.wallet_tools import WalletTool
 from chia.types.blockchain_format.sized_bytes import bytes32
diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py
index 0079a7d58e1b..3dff7aeed53c 100644
--- a/chia/_tests/core/full_node/test_full_node.py
+++ b/chia/_tests/core/full_node/test_full_node.py
@@ -20,7 +20,7 @@
 from chia._tests.core.full_node.stores.test_coin_store import get_future_reward_coins
 from chia._tests.core.make_block_generator import make_spend_bundle
 from chia._tests.core.node_height import node_height_at_least
-from chia._tests.util.misc import add_blocks_in_batches, wallet_height_at_least
+from chia._tests.util.misc import wallet_height_at_least
 from chia._tests.util.setup_nodes import SimulatorsAndWalletsServices
 from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_custom_interval, time_out_messages
from chia.consensus.block_body_validation import ForkInfo @@ -40,6 +40,7 @@ from chia.server.address_manager import AddressManager from chia.server.outbound_message import Message, NodeType from chia.server.server import ChiaServer +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools, create_block_tools_async, get_signage_point, make_unfinished_block from chia.simulator.full_node_simulator import FullNodeSimulator from chia.simulator.keyring import TempKeyring diff --git a/chia/_tests/core/mempool/test_mempool_performance.py b/chia/_tests/core/mempool/test_mempool_performance.py index 54e3515780bf..1da2a1f1b33b 100644 --- a/chia/_tests/core/mempool/test_mempool_performance.py +++ b/chia/_tests/core/mempool/test_mempool_performance.py @@ -2,9 +2,10 @@ import pytest -from chia._tests.util.misc import BenchmarkRunner, add_blocks_in_batches, wallet_height_at_least +from chia._tests.util.misc import BenchmarkRunner, wallet_height_at_least from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia._tests.util.time_out_assert import time_out_assert +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.types.full_block import FullBlock from chia.types.mempool_inclusion_status import MempoolInclusionStatus from chia.types.peer_info import PeerInfo diff --git a/chia/_tests/core/test_full_node_rpc.py b/chia/_tests/core/test_full_node_rpc.py index 79ba7ff51628..a91de8854468 100644 --- a/chia/_tests/core/test_full_node_rpc.py +++ b/chia/_tests/core/test_full_node_rpc.py @@ -9,7 +9,6 @@ from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block from chia._tests.conftest import ConsensusMode from chia._tests.connection_utils import connect_and_get_peer -from chia._tests.util.misc import add_blocks_in_batches from chia._tests.util.rpc import validate_get_routes from chia._tests.util.time_out_assert import time_out_assert from chia.consensus.block_record import BlockRecord @@ -19,6 +18,7 @@ from chia.rpc.full_node_rpc_api import get_average_block_time, get_nearest_transaction_block from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.server.outbound_message import NodeType +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import get_signage_point from chia.simulator.simulator_protocol import FarmNewBlockProtocol, ReorgProtocol from chia.simulator.wallet_tools import WalletTool diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index c80a89e21db5..751d9aacaa08 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -33,19 +33,11 @@ import chia import chia._tests from chia._tests import ether -from chia._tests.connection_utils import add_dummy_connection from chia._tests.core.data_layer.util import ChiaRoot from chia._tests.util.time_out_assert import DataTypeProtocol, caller_file_and_line -from chia.consensus.block_body_validation import ForkInfo -from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty -from chia.full_node.full_node import FullNode, PeakPostProcessingResult from chia.full_node.mempool import Mempool from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.condition_opcodes import ConditionOpcode -from chia.types.full_block import FullBlock -from chia.types.peer_info import PeerInfo -from chia.types.validation_state import ValidationState -from chia.util.batches import to_batches from chia.util.hash import 
std_hash from chia.util.ints import uint16, uint32, uint64 from chia.util.network import WebServer @@ -638,49 +630,3 @@ def __ge__(self: T_ComparableEnum, other: T_ComparableEnum) -> object: return NotImplemented return self.value.__ge__(other.value) - - -async def add_blocks_in_batches( - blocks: list[FullBlock], - full_node: FullNode, - header_hash: Optional[bytes32] = None, -) -> None: - if header_hash is None: - diff = full_node.constants.DIFFICULTY_STARTING - ssi = full_node.constants.SUB_SLOT_ITERS_STARTING - fork_height = -1 - fork_info = ForkInfo(-1, fork_height, full_node.constants.GENESIS_CHALLENGE) - else: - block_record = await full_node.blockchain.get_block_record_from_db(header_hash) - assert block_record is not None - ssi, diff = get_next_sub_slot_iters_and_difficulty( - full_node.constants, True, block_record, full_node.blockchain - ) - fork_height = block_record.height - fork_info = ForkInfo(block_record.height, fork_height, block_record.header_hash) - - _, dummy_node_id = await add_dummy_connection(full_node.server, "127.0.0.1", 12315) - dummy_peer = full_node.server.all_connections[dummy_node_id] - vs = ValidationState(ssi, diff, None) - - for block_batch in to_batches(blocks, 64): - b = block_batch.entries[0] - if (b.height % 128) == 0: - print(f"main chain: {b.height:4} weight: {b.weight}") - # vs is updated by the call to add_block_batch() - success, state_change_summary, err = await full_node.add_block_batch( - block_batch.entries, - PeerInfo("0.0.0.0", 0), - fork_info, - vs, - ) - assert err is None - assert success is True - if state_change_summary is not None: - peak_fb: Optional[FullBlock] = await full_node.blockchain.get_full_peak() - assert peak_fb is not None - ppp_result: PeakPostProcessingResult = await full_node.peak_post_processing( - peak_fb, state_change_summary, dummy_peer - ) - await full_node.peak_post_processing_2(peak_fb, dummy_peer, state_change_summary, ppp_result) - await full_node._finish_sync() diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 26e096981ae2..76055d61da66 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -15,7 +15,7 @@ from chia._tests.connection_utils import disconnect_all, disconnect_all_and_reconnect from chia._tests.util.blockchain_mock import BlockchainMock -from chia._tests.util.misc import add_blocks_in_batches, wallet_height_at_least +from chia._tests.util.misc import wallet_height_at_least from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_not_none from chia._tests.weight_proof.test_weight_proof import load_blocks_dont_validate @@ -37,6 +37,7 @@ ) from chia.server.outbound_message import Message, make_msg from chia.server.ws_connection import WSChiaConnection +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.simulator_protocol import FarmNewBlockProtocol from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 diff --git a/chia/_tests/wallet/test_wallet_blockchain.py b/chia/_tests/wallet/test_wallet_blockchain.py index a2d92be0b08b..03b76a81c9a9 100644 --- a/chia/_tests/wallet/test_wallet_blockchain.py +++ b/chia/_tests/wallet/test_wallet_blockchain.py @@ -3,10 +3,10 @@ import pytest from chia._tests.util.db_connection import DBConnection -from chia._tests.util.misc import add_blocks_in_batches from 
chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia.consensus.blockchain import AddBlockResult from chia.protocols import full_node_protocol +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.types.blockchain_format.vdf import VDFProof from chia.types.full_block import FullBlock from chia.types.header_block import HeaderBlock diff --git a/chia/_tests/wallet/test_wallet_node.py b/chia/_tests/wallet/test_wallet_node.py index 3997502241ba..41c8084efc44 100644 --- a/chia/_tests/wallet/test_wallet_node.py +++ b/chia/_tests/wallet/test_wallet_node.py @@ -10,13 +10,14 @@ import pytest from chia_rs import G1Element, PrivateKey -from chia._tests.util.misc import CoinGenerator, add_blocks_in_batches +from chia._tests.util.misc import CoinGenerator from chia._tests.util.setup_nodes import OldSimulatorsAndWallets from chia._tests.util.time_out_assert import time_out_assert from chia.protocols import wallet_protocol from chia.protocols.protocol_message_types import ProtocolMessageTypes from chia.protocols.wallet_protocol import CoinState from chia.server.outbound_message import Message, make_msg +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import test_constants from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.sized_bytes import bytes32 diff --git a/chia/simulator/add_blocks_in_batches.py b/chia/simulator/add_blocks_in_batches.py new file mode 100644 index 000000000000..965e40f28186 --- /dev/null +++ b/chia/simulator/add_blocks_in_batches.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from typing import Optional + +from chia.consensus.block_body_validation import ForkInfo +from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty +from chia.full_node.full_node import FullNode, PeakPostProcessingResult +from chia.types.blockchain_format.sized_bytes import bytes32 +from chia.types.full_block import FullBlock +from chia.types.peer_info import PeerInfo +from chia.types.validation_state import ValidationState +from chia.util.batches import to_batches + + +async def add_blocks_in_batches( + blocks: list[FullBlock], + full_node: FullNode, + header_hash: Optional[bytes32] = None, +) -> None: + if header_hash is None: + diff = full_node.constants.DIFFICULTY_STARTING + ssi = full_node.constants.SUB_SLOT_ITERS_STARTING + fork_height = -1 + fork_info = ForkInfo(-1, fork_height, full_node.constants.GENESIS_CHALLENGE) + else: + block_record = await full_node.blockchain.get_block_record_from_db(header_hash) + assert block_record is not None + ssi, diff = get_next_sub_slot_iters_and_difficulty( + full_node.constants, True, block_record, full_node.blockchain + ) + fork_height = block_record.height + fork_info = ForkInfo(block_record.height, fork_height, block_record.header_hash) + + vs = ValidationState(ssi, diff, None) + + for block_batch in to_batches(blocks, 64): + b = block_batch.entries[0] + if (b.height % 128) == 0: + print(f"main chain: {b.height:4} weight: {b.weight}") + # vs is updated by the call to add_block_batch() + success, state_change_summary, err = await full_node.add_block_batch( + block_batch.entries, + PeerInfo("0.0.0.0", 0), + fork_info, + vs, + ) + assert err is None + assert success is True + if state_change_summary is not None: + peak_fb: Optional[FullBlock] = await full_node.blockchain.get_full_peak() + assert peak_fb is not None + ppp_result: PeakPostProcessingResult = await full_node.peak_post_processing( + 
peak_fb, state_change_summary, None + ) + await full_node.peak_post_processing_2(peak_fb, None, state_change_summary, ppp_result) + await full_node._finish_sync() diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index d8a100c5b0e6..ef42d7b0a15c 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -8,7 +8,6 @@ import anyio -from chia._tests.util.misc import add_blocks_in_batches from chia.consensus.block_body_validation import ForkInfo from chia.consensus.block_record import BlockRecord from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward @@ -18,6 +17,7 @@ from chia.full_node.full_node_api import FullNodeAPI from chia.rpc.rpc_server import default_get_connections from chia.server.outbound_message import NodeType +from chia.simulator.add_blocks_in_batches import add_blocks_in_batches from chia.simulator.block_tools import BlockTools from chia.simulator.simulator_protocol import FarmNewBlockProtocol, GetAllCoinsProtocol, ReorgProtocol from chia.types.blockchain_format.coin import Coin From eb68369939e7d7dc51855183f6a60ca857fa9e71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 15:00:03 -0700 Subject: [PATCH 58/69] build(deps): bump coverage from 7.6.1 to 7.6.4 (#18748) Bumps [coverage](https://github.com/nedbat/coveragepy) from 7.6.1 to 7.6.4. - [Release notes](https://github.com/nedbat/coveragepy/releases) - [Changelog](https://github.com/nedbat/coveragepy/blob/master/CHANGES.rst) - [Commits](https://github.com/nedbat/coveragepy/compare/7.6.1...7.6.4) --- updated-dependencies: - dependency-name: coverage dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 140 +++++++++++++++++++++++-------------------------- pyproject.toml | 2 +- 2 files changed, 66 insertions(+), 76 deletions(-) diff --git a/poetry.lock b/poetry.lock index a5b6c9b737ff..bed342adc24f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1088,83 +1088,73 @@ portalocker = ">=1.6.0" [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.4" description = "Code coverage measurement for Python" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = 
"coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - 
{file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = "coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = 
"coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, ] [package.dependencies] @@ -3438,4 +3428,4 @@ upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "2cb2d09f09cd70719d304f344edf40c50753d47d76510e1db035f5a90ee66559" +content-hash = "b3e7954f358d50320cf0aa1fe16dee1f17916c45e14377b838c671240e4962b8" diff --git a/pyproject.toml b/pyproject.toml index 5a916c8b8da1..dbdc231df899 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,7 +81,7 @@ hsms = "0.3.1" aiohttp_cors = { version = "0.7.0", optional = true } black = { version = "24.8.0", optional = true } build = { version = "1.2.1", optional = true } -coverage = { version = "7.6.1", optional = true } +coverage = { version = "7.6.4", optional = true } diff-cover = { version = "9.2.0", optional = true } flake8 = { version = "7.1.1", optional = true } isort = { version = "5.13.2", optional = true } From 2588600d1ca48e6ffeeb7889c4e21a6a8d53437c Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 24 Oct 2024 14:52:10 -0400 Subject: [PATCH 59/69] more lowercase types such as in comments (#18737) --- chia/_tests/blockchain/test_blockchain.py | 2 +- chia/_tests/farmer_harvester/test_farmer.py | 16 ++++++++-------- chia/_tests/util/gen_ssl_certs.py | 7 ++----- .../wallet/test_singleton_lifecycle_fast.py | 2 +- chia/_tests/wallet/test_transaction_store.py | 4 ++-- chia/cmds/wallet_funcs.py | 2 +- chia/consensus/block_body_validation.py | 2 +- chia/data_layer/util/plugin.py | 2 +- chia/full_node/full_node_store.py | 4 ++-- chia/full_node/mempool_manager.py | 4 ++-- chia/full_node/weight_proof.py | 2 +- chia/protocols/wallet_protocol.py | 6 +++--- chia/rpc/data_layer_rpc_api.py | 2 +- chia/rpc/farmer_rpc_api.py | 2 +- chia/types/eligible_coin_spends.py | 6 +++--- chia/util/streamable.py | 4 ++-- chia/wallet/puzzles/clawback/drivers.py | 2 +- chia/wallet/trade_manager.py | 12 ++++++------ chia/wallet/util/merkle_tree.py | 4 ++-- chia/wallet/util/merkle_utils.py | 4 ++-- chia/wallet/wallet_state_manager.py | 2 +- 21 files changed, 44 insertions(+), 47 deletions(-) diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 55bbbe453a6d..40ad49a5b2e6 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -2808,7 +2808,7 @@ async def test_max_coin_amount(self, db_version: int, bt: BlockTools) -> None: # wt: WalletTool = bt_2.get_pool_wallet_tool() - # condition_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} + # condition_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} # output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt_2.pool_ph, int_to_bytes(2 ** 64)]) # condition_dict[ConditionOpcode.CREATE_COIN].append(output) diff --git a/chia/_tests/farmer_harvester/test_farmer.py b/chia/_tests/farmer_harvester/test_farmer.py index 0cb53e22fa94..ab1ccec92354 100644 --- a/chia/_tests/farmer_harvester/test_farmer.py +++ b/chia/_tests/farmer_harvester/test_farmer.py @@ -348,7 
+348,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -380,7 +380,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -411,7 +411,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -442,7 +442,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -473,7 +473,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -504,7 +504,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -535,7 +535,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 1, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [1], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -768,7 +768,7 @@ def override_pool_state(overrides: dict[str, Any]) -> dict[str, Any]: pool_state = { "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. 
"points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], diff --git a/chia/_tests/util/gen_ssl_certs.py b/chia/_tests/util/gen_ssl_certs.py index b15ccf97eb9d..d80b27201304 100644 --- a/chia/_tests/util/gen_ssl_certs.py +++ b/chia/_tests/util/gen_ssl_certs.py @@ -43,9 +43,6 @@ def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: private_ca_key: Optional[bytes] = None capture_cert_and_key = True - print("from typing import Tuple") - print() - make_ca_cert(Path("SSL_TEST_PRIVATE_CA_CRT"), Path("SSL_TEST_PRIVATE_CA_KEY")) capture_cert_and_key = False @@ -92,11 +89,11 @@ def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: append_str = "" if suffix == "" else f"_{suffix}" print( - f"SSL_TEST_PRIVATE_CA_CERT_AND_KEY{append_str}: Tuple[bytes, bytes] = " + f"SSL_TEST_PRIVATE_CA_CERT_AND_KEY{append_str}: tuple[bytes, bytes] = " "(SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY)" ) print() - print(f"SSL_TEST_NODE_CERTS_AND_KEYS{append_str}: Dict[str, Dict[str, Dict[str, bytes]]] = {{") + print(f"SSL_TEST_NODE_CERTS_AND_KEYS{append_str}: dict[str, dict[str, dict[str, bytes]]] = {{") for node_name, cert_type_dict in node_certs_and_keys.items(): print(f' "{node_name}": {{') for cert_type, cert_dict in cert_type_dict.items(): diff --git a/chia/_tests/wallet/test_singleton_lifecycle_fast.py b/chia/_tests/wallet/test_singleton_lifecycle_fast.py index 09e2318508c1..746571466dc7 100644 --- a/chia/_tests/wallet/test_singleton_lifecycle_fast.py +++ b/chia/_tests/wallet/test_singleton_lifecycle_fast.py @@ -56,7 +56,7 @@ def satisfies_hint(obj: T, type_hint: type[T]) -> bool: if not isinstance(obj, origin): return False if len(args) > 0: - # Tuple[T, ...] gets handled just like List[T] + # tuple[T, ...] 
gets handled just like list[T] if origin is list or (origin is tuple and args[-1] is Ellipsis): object_hint_pairs.extend((item, args[0]) for item in obj) elif origin is tuple: diff --git a/chia/_tests/wallet/test_transaction_store.py b/chia/_tests/wallet/test_transaction_store.py index 13378db63305..6264a66a2e31 100644 --- a/chia/_tests/wallet/test_transaction_store.py +++ b/chia/_tests/wallet/test_transaction_store.py @@ -37,11 +37,11 @@ [coin_2, coin_3], # additions [coin_1], # removals uint32(1), # wallet_id - [], # List[Tuple[str, uint8, Optional[str]]] sent_to + [], # list[tuple[str, uint8, Optional[str]]] sent_to bytes32(bytes32.random(module_seeded_random)), # trade_id uint32(TransactionType.OUTGOING_TX), # type bytes32(bytes32.random(module_seeded_random)), # name - [], # List[Tuple[bytes32, List[bytes]]] memos + [], # list[tuple[bytes32, list[bytes]]] memos ConditionValidTimes(), ) diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index e1c8da215f79..da407b33bfa7 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -423,7 +423,7 @@ async def make_offer( else: offer_dict: dict[Union[uint32, str], int] = {} driver_dict: dict[str, Any] = {} - printable_dict: dict[str, tuple[str, int, int]] = {} # Dict[asset_name, Tuple[amount, unit, multiplier]] + printable_dict: dict[str, tuple[str, int, int]] = {} # dict[asset_name, tuple[amount, unit, multiplier]] royalty_asset_dict: dict[Any, tuple[Any, uint16]] = {} fungible_asset_dict: dict[Any, uint64] = {} for item in [*offers, *requests]: diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 017f916362c0..796a0699f055 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -312,7 +312,7 @@ async def validate_block_body( if block.transactions_generator is None: return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None - # The generator_refs_root must be the hash of the concatenation of the List[uint32] + # The generator_refs_root must be the hash of the concatenation of the list[uint32] generator_refs_hash = std_hash(b"".join([i.stream_to_bytes() for i in block.transactions_generator_ref_list])) if block.transactions_info.generator_refs_root != generator_refs_hash: return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None diff --git a/chia/data_layer/util/plugin.py b/chia/data_layer/util/plugin.py index 7f5aa8e7e568..12d1f8bb27ba 100644 --- a/chia/data_layer/util/plugin.py +++ b/chia/data_layer/util/plugin.py @@ -20,7 +20,7 @@ async def load_plugin_configurations(root_path: Path, config_type: str, log: log config_type (str): The type of plugins to load ('downloaders' or 'uploaders'). Returns: - List[PluginRemote]: A list of valid PluginRemote instances for the specified plugin type. + list[PluginRemote]: A list of valid PluginRemote instances for the specified plugin type. """ config_path = root_path / "plugins" / config_type config_path.mkdir(parents=True, exist_ok=True) # Ensure the config directory exists diff --git a/chia/full_node/full_node_store.py b/chia/full_node/full_node_store.py index c25aa5f26e24..3f77f5f4f7a1 100644 --- a/chia/full_node/full_node_store.py +++ b/chia/full_node/full_node_store.py @@ -92,7 +92,7 @@ class FullNodeStore: candidate_backup_blocks: dict[bytes32, tuple[uint32, UnfinishedBlock]] # Block hashes of unfinished blocks that we have seen recently. 
This is - # effectively a Set[bytes32] but in order to evict the oldest items first, + # effectively a set[bytes32] but in order to evict the oldest items first, # we use a Dict that preserves insertion order, and remove from the # beginning seen_unfinished_blocks: dict[bytes32, None] @@ -135,7 +135,7 @@ class FullNodeStore: recent_eos: LRUCache[bytes32, tuple[EndOfSubSlotBundle, float]] pending_tx_request: dict[bytes32, bytes32] # tx_id: peer_id - peers_with_tx: dict[bytes32, set[bytes32]] # tx_id: Set[peer_ids} + peers_with_tx: dict[bytes32, set[bytes32]] # tx_id: set[peer_ids} tx_fetch_tasks: dict[bytes32, asyncio.Task[None]] # Task id: task serialized_wp_message: Optional[Message] serialized_wp_message_tip: Optional[bytes32] diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 093eb4108a5b..ad73c40b8582 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -338,7 +338,7 @@ async def add_spend_bundle( Returns: Optional[uint64]: cost of the entire transaction, None iff status is FAILED MempoolInclusionStatus: SUCCESS (should add to pool), FAILED (cannot add), and PENDING (can add later) - List[MempoolRemoveInfo]: conflicting mempool items which were removed, if no Err + list[MempoolRemoveInfo]: conflicting mempool items which were removed, if no Err Optional[Err]: Err is set iff status is FAILED """ @@ -401,7 +401,7 @@ async def validate_spend_bundle( Returns: Optional[Err]: Err is set if we cannot add to the mempool, None if we will immediately add to mempool Optional[MempoolItem]: the item to add (to mempool or pending pool) - List[bytes32]: conflicting mempool items to remove, if no Err + list[bytes32]: conflicting mempool items to remove, if no Err """ start_time = time.time() if self.peak is None: diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index f5f2c29dd713..1f03af771efb 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@ -679,7 +679,7 @@ def _sample_sub_epoch( weight_to_check: Optional[list[uint128]], ) -> bool: """ - weight_to_check: List[uint128] is expected to be sorted + weight_to_check: list[uint128] is expected to be sorted """ if weight_to_check is None: return True diff --git a/chia/protocols/wallet_protocol.py b/chia/protocols/wallet_protocol.py index d6a55d61473a..bdb06523e81a 100644 --- a/chia/protocols/wallet_protocol.py +++ b/chia/protocols/wallet_protocol.py @@ -208,9 +208,9 @@ class RegisterForPhUpdates(Streamable): # @streamable # @dataclass(frozen=True) # class RespondToPhUpdates(Streamable): -# puzzle_hashes: List[bytes32] +# puzzle_hashes: list[bytes32] # min_height: uint32 -# coin_states: List[CoinState] +# coin_states: list[CoinState] @streamable @@ -267,7 +267,7 @@ class RespondSESInfo(Streamable): @dataclass(frozen=True) class RequestFeeEstimates(Streamable): """ - time_targets (List[uint64]): Epoch timestamps in seconds to estimate FeeRates for. + time_targets (list[uint64]): Epoch timestamps in seconds to estimate FeeRates for. """ time_targets: list[uint64] diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py index f7f7ca308e32..dbd93a18916b 100644 --- a/chia/rpc/data_layer_rpc_api.py +++ b/chia/rpc/data_layer_rpc_api.py @@ -46,7 +46,7 @@ def process_change(change: dict[str, Any]) -> dict[str, Any]: # TODO: A full class would likely be nice for this so downstream doesn't - # have to deal with maybe-present attributes or Dict[str, Any] hints. 
+ # have to deal with maybe-present attributes or dict[str, Any] hints. reference_node_hash = change.get("reference_node_hash") if reference_node_hash is not None: reference_node_hash = bytes32.from_hexstr(reference_node_hash) diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 8415a119781b..3e608080b89b 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -316,7 +316,7 @@ async def get_harvesters_summary(self, _: dict[str, object]) -> EndpointResult: return await self.service.get_harvesters(True) async def get_harvester_plots_valid(self, request_dict: dict[str, object]) -> EndpointResult: - # TODO: Consider having a extra List[PlotInfo] in Receiver to avoid rebuilding the list for each call + # TODO: Consider having a extra list[PlotInfo] in Receiver to avoid rebuilding the list for each call request = PlotInfoRequestData.from_json_dict(request_dict) plot_list = list(self.service.get_receiver(request.node_id).plots().values()) # Apply filter diff --git a/chia/types/eligible_coin_spends.py b/chia/types/eligible_coin_spends.py index b14b1612adb2..fdc27c8bc447 100644 --- a/chia/types/eligible_coin_spends.py +++ b/chia/types/eligible_coin_spends.py @@ -98,7 +98,7 @@ def perform_the_fast_forward( Returns: CoinSpend: the new coin spend after performing the fast forward - List[Coin]: the updated additions that point to the new coin to spend + list[Coin]: the updated additions that point to the new coin to spend Raises: ValueError if none of the additions are considered to be the singleton's @@ -158,9 +158,9 @@ def get_deduplication_info( max_cost: the maximum limit when running for cost Returns: - List[CoinSpend]: list of unique coin spends in this mempool item + list[CoinSpend]: list of unique coin spends in this mempool item uint64: the cost we're saving by deduplicating eligible coins - List[Coin]: list of unique additions in this mempool item + list[Coin]: list of unique additions in this mempool item Raises: ValueError to skip the mempool item we're currently in, if it's diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 6ddde747f7d9..3b4684789185 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -530,8 +530,8 @@ class Streamable: An item is one of: * primitive - * Tuple[item1, .. itemx] - * List[item1, .. itemx] + * tuple[item1, .. itemx] + * list[item1, .. 
itemx] * Optional[item] * Custom item diff --git a/chia/wallet/puzzles/clawback/drivers.py b/chia/wallet/puzzles/clawback/drivers.py index acf63ef0db7f..fcc9875aee72 100644 --- a/chia/wallet/puzzles/clawback/drivers.py +++ b/chia/wallet/puzzles/clawback/drivers.py @@ -76,7 +76,7 @@ def create_merkle_proof(merkle_tree: MerkleTree, puzzle_hash: bytes32) -> Progra To spend a p2_1_of_n clawback we recreate the full merkle tree The required proof is then selected from the merkle tree based on the puzzle_hash of the puzzle we want to execute - Returns a proof: (int, List[bytes32]) which can be provided to the p2_1_of_n solution + Returns a proof: (int, list[bytes32]) which can be provided to the p2_1_of_n solution """ proof = merkle_tree.generate_proof(puzzle_hash) program: Program = Program.to((proof[0], proof[1][0])) diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index b29c89a6ca99..cac727ad22e8 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -72,20 +72,20 @@ class TradeManager: { "coin": bytes "parent_spend": bytes - "siblings": List[bytes] # other coins of the same type being offered - "sibling_spends": List[bytes] # The parent spends for the siblings - "sibling_puzzles": List[Program] # The inner puzzles of the siblings (always OFFER_MOD) - "sibling_solutions": List[Program] # The inner solution of the siblings + "siblings": list[bytes] # other coins of the same type being offered + "sibling_spends": list[bytes] # The parent spends for the siblings + "sibling_puzzles": list[Program] # The inner puzzles of the siblings (always OFFER_MOD) + "sibling_solutions": list[Program] # The inner solution of the siblings } ) Wallet: - Segments in this code that call general wallet methods are highlighted by comments: # ATTENTION: new wallets - To be able to be traded, a wallet must implement these methods on itself: - - generate_signed_transaction(...) -> List[TransactionRecord] (See cat_wallet.py for full API) + - generate_signed_transaction(...) 
-> list[TransactionRecord] (See cat_wallet.py for full API) - convert_puzzle_hash(puzzle_hash: bytes32) -> bytes32 # Converts a puzzlehash from outer to inner puzzle - get_puzzle_info(asset_id: bytes32) -> PuzzleInfo - - get_coins_to_offer(asset_id: bytes32, amount: uint64) -> Set[Coin] + - get_coins_to_offer(asset_id: bytes32, amount: uint64) -> set[Coin] - If you would like assets from your wallet to be referenced with just a wallet ID, you must also implement: - get_asset_id() -> bytes32 - Finally, you must make sure that your wallet will respond appropriately when these WSM methods are called: diff --git a/chia/wallet/util/merkle_tree.py b/chia/wallet/util/merkle_tree.py index 66458c01bbf8..545593eb4787 100644 --- a/chia/wallet/util/merkle_tree.py +++ b/chia/wallet/util/merkle_tree.py @@ -76,14 +76,14 @@ def _proof( if first_hash[0] is not None: final_list = first_hash[1] # TODO: handle hints - # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr] + # error: Item "None" of "Optional[list[bytes32]]" has no attribute "append" [union-attr] final_list.append(rest_hash[2]) # type: ignore[union-attr] bit_num = first_hash[3] final_path = first_hash[0] elif rest_hash[0] is not None: final_list = rest_hash[1] # TODO: handle hints - # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr] + # error: Item "None" of "Optional[list[bytes32]]" has no attribute "append" [union-attr] final_list.append(first_hash[2]) # type: ignore[union-attr] bit_num = rest_hash[3] # TODO: handle hints diff --git a/chia/wallet/util/merkle_utils.py b/chia/wallet/util/merkle_utils.py index 960e8738dd5e..3df8c4767429 100644 --- a/chia/wallet/util/merkle_utils.py +++ b/chia/wallet/util/merkle_utils.py @@ -5,8 +5,8 @@ from chia.types.blockchain_format.sized_bytes import bytes32 -TupleTree = Any # Union[bytes32, Tuple["TupleTree", "TupleTree"]] -Proof_Tree_Type = Any # Union[bytes32, Tuple[bytes32, "Proof_Tree_Type"]] +TupleTree = Any # Union[bytes32, tuple["TupleTree", "TupleTree"]] +Proof_Tree_Type = Any # Union[bytes32, tuple[bytes32, "Proof_Tree_Type"]] HASH_TREE_PREFIX = bytes([2]) diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index f614287bbb62..6190ed568c26 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -721,7 +721,7 @@ async def get_unconfirmed_balance( Returns the balance, including coinbase rewards that are not spendable, and unconfirmed transactions. """ - # This API should change so that get_balance_from_coin_records is called for Set[WalletCoinRecord] + # This API should change so that get_balance_from_coin_records is called for set[WalletCoinRecord] # and this method is called only for the unspent_coin_records==None case. if unspent_coin_records is None: wallet_type: WalletType = self.wallets[uint32(wallet_id)].type() From f4ff9d621e29658d448530cf44df16d0a550c3eb Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 24 Oct 2024 19:52:22 +0100 Subject: [PATCH 60/69] CHIA-1664 Deduplicate cancel_task_safe (#18745) Deduplicate cancel_task_safe. 
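For context, the shared helper itself appears below only as a rename (chia/full_node/util/safe_cancel_task.py -> chia/util/safe_cancel_task.py), so its body is mostly elided. Reconstructed from the removed per-class methods and the new call sites, it looks roughly like this minimal sketch, not the verbatim module:

from __future__ import annotations

import asyncio
import logging
from typing import Optional


def cancel_task_safe(task: Optional[asyncio.Task[None]], log: logging.Logger) -> None:
    # Cancelling None is a no-op; any unexpected error raised while
    # requesting cancellation is logged rather than propagated.
    if task is not None:
        try:
            task.cancel()
        except Exception as e:
            log.error(f"Error while canceling task.{e} {task}")

Callers that previously used a copy on their own class, e.g. self.cancel_task_safe(self.ping_job), now pass their logger explicitly: cancel_task_safe(self.ping_job, self.log).
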
--- chia/daemon/server.py | 12 +++-------- chia/full_node/full_node.py | 2 +- chia/server/node_discovery.py | 20 +++++++------------ chia/{full_node => }/util/safe_cancel_task.py | 2 ++ 4 files changed, 13 insertions(+), 23 deletions(-) rename chia/{full_node => }/util/safe_cancel_task.py (95%) diff --git a/chia/daemon/server.py b/chia/daemon/server.py index 89a4dc918399..ea9c64948273 100644 --- a/chia/daemon/server.py +++ b/chia/daemon/server.py @@ -45,6 +45,7 @@ from chia.util.lock import Lockfile, LockfileError from chia.util.log_exceptions import log_exceptions from chia.util.network import WebServer +from chia.util.safe_cancel_task import cancel_task_safe from chia.util.service_groups import validate_service from chia.util.setproctitle import setproctitle from chia.util.ws_message import WsRpcMessage, create_payload, format_response @@ -241,19 +242,12 @@ async def _accept_signal( self.log.info("Received signal %s (%s), shutting down.", signal_.name, signal_.value) await self.stop() - def cancel_task_safe(self, task: Optional[asyncio.Task]): - if task is not None: - try: - task.cancel() - except Exception as e: - self.log.error(f"Error while canceling task.{e} {task}") - async def stop_command(self, websocket: WebSocketResponse, request: dict[str, Any] = {}) -> dict[str, Any]: return await self.stop() async def stop(self) -> dict[str, Any]: - self.cancel_task_safe(self.ping_job) - self.cancel_task_safe(self.state_changed_task) + cancel_task_safe(self.ping_job, self.log) + cancel_task_safe(self.state_changed_task, self.log) service_names = list(self.services.keys()) stop_service_jobs = [ asyncio.create_task(kill_service(self.root_path, self.services, s_n)) for s_n in service_names diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 0de2ae677de0..29fdfaeab1ee 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -47,7 +47,6 @@ from chia.full_node.subscriptions import PeerSubscriptions, peers_for_spend_bundle from chia.full_node.sync_store import Peak, SyncStore from chia.full_node.tx_processing_queue import TransactionQueue -from chia.full_node.util.safe_cancel_task import cancel_task_safe from chia.full_node.weight_proof import WeightProofHandler from chia.protocols import farmer_protocol, full_node_protocol, timelord_protocol, wallet_protocol from chia.protocols.farmer_protocol import SignagePointSourceData, SPSubSlotSourceData, SPVDFSourceData @@ -89,6 +88,7 @@ from chia.util.log_exceptions import log_exceptions from chia.util.path import path_from_root from chia.util.profiler import enable_profiler, mem_profile_task, profile_task +from chia.util.safe_cancel_task import cancel_task_safe # This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2 diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py index 31f2d204386f..57afa4f204ae 100644 --- a/chia/server/node_discovery.py +++ b/chia/server/node_discovery.py @@ -26,6 +26,7 @@ from chia.util.ints import uint16, uint64 from chia.util.ip_address import IPAddress from chia.util.network import resolve +from chia.util.safe_cancel_task import cancel_task_safe MAX_PEERS_RECEIVED_PER_REQUEST = 1000 MAX_TOTAL_PEERS_RECEIVED = 3000 @@ -103,21 +104,14 @@ async def start_tasks(self) -> None: async def _close_common(self) -> None: self.is_closed = True - self.cancel_task_safe(self.connect_peers_task) - self.cancel_task_safe(self.serialize_task) - self.cancel_task_safe(self.cleanup_task) + cancel_task_safe(self.connect_peers_task, self.log) 
+ cancel_task_safe(self.serialize_task, self.log) + cancel_task_safe(self.cleanup_task, self.log) for t in self.pending_tasks: - self.cancel_task_safe(t) + cancel_task_safe(t, self.log) if len(self.pending_tasks) > 0: await asyncio.wait(self.pending_tasks) - def cancel_task_safe(self, task: Optional[asyncio.Task[None]]) -> None: - if task is not None: - try: - task.cancel() - except Exception as e: - self.log.error(f"Error while canceling task.{e} {task}") - async def on_connect(self, peer: WSChiaConnection) -> None: if ( peer.is_outbound is False @@ -534,8 +528,8 @@ async def start(self) -> None: async def close(self) -> None: await self._close_common() - self.cancel_task_safe(self.self_advertise_task) - self.cancel_task_safe(self.address_relay_task) + cancel_task_safe(self.self_advertise_task, self.log) + cancel_task_safe(self.address_relay_task, self.log) async def _periodically_self_advertise_and_clean_data(self) -> None: while not self.is_closed: diff --git a/chia/full_node/util/safe_cancel_task.py b/chia/util/safe_cancel_task.py similarity index 95% rename from chia/full_node/util/safe_cancel_task.py rename to chia/util/safe_cancel_task.py index 2fb08772f154..b029e31ad1b4 100644 --- a/chia/full_node/util/safe_cancel_task.py +++ b/chia/util/safe_cancel_task.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio From 030ffddf35829b463e1c1cdb63dc85013a9123e0 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Thu, 24 Oct 2024 14:52:32 -0400 Subject: [PATCH 61/69] add vpa virtual project package (#18754) --- chia/util/virtual_project_analysis.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py index 6422dbe74f16..d5901fd34ba8 100644 --- a/chia/util/virtual_project_analysis.py +++ b/chia/util/virtual_project_analysis.py @@ -1,4 +1,4 @@ -# Package: utils +# Package: virtual_project_analysis from __future__ import annotations From 36d7c0146afddb52842d62ffe1a4e42f8100c4ce Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Thu, 24 Oct 2024 19:52:56 +0100 Subject: [PATCH 62/69] CHIA-1669 Simplify tx_additions and tx_removals in validate_block_merkle_roots (#18758) Simplify tx_additions and tx_removals in validate_block_merkle_roots. 
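The diff below removes the Optional defaults, so both lists become required positional arguments and the dead None-handling branches go away. A hypothetical call site (the surrounding names are illustrative, not taken from this diff) would now read:

# Compute the additions/removals from the block, passing empty lists for a
# block with no transactions instead of relying on defaults.
tx_additions: list[tuple[Coin, bytes32]] = []
tx_removals: list[bytes32] = []
error = validate_block_merkle_roots(
    additions_root,
    removals_root,
    tx_additions,
    tx_removals,
)
if error is not None:
    return error
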
--- chia/consensus/block_root_validation.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/chia/consensus/block_root_validation.py b/chia/consensus/block_root_validation.py index 215cf6e2e7a6..cdda927f3611 100644 --- a/chia/consensus/block_root_validation.py +++ b/chia/consensus/block_root_validation.py @@ -12,14 +12,9 @@ def validate_block_merkle_roots( block_additions_root: bytes32, block_removals_root: bytes32, - tx_additions: Optional[list[tuple[Coin, bytes32]]] = None, - tx_removals: Optional[list[bytes32]] = None, + tx_additions: list[tuple[Coin, bytes32]], + tx_removals: list[bytes32], ) -> Optional[Err]: - if tx_removals is None: - tx_removals = [] - if tx_additions is None: - tx_additions = [] - # Create addition Merkle set puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {} From 15d0daf3394d1529ae0329c5b1fe2d6e64cbcd7e Mon Sep 17 00:00:00 2001 From: Earle Lowe <30607889+emlowe@users.noreply.github.com> Date: Thu, 24 Oct 2024 11:53:22 -0700 Subject: [PATCH 63/69] Update clvm_tools_rs to 0.1.45 (#18761) --- poetry.lock | 17 +++++++++-------- pyproject.toml | 2 +- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/poetry.lock b/poetry.lock index bed342adc24f..fc77a1709392 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1031,17 +1031,18 @@ dev = ["pytest"] [[package]] name = "clvm-tools-rs" -version = "0.1.43" +version = "0.1.45" description = "tools for working with chialisp language; compiler, repl, python and wasm bindings" optional = false python-versions = "*" files = [ - {file = "clvm_tools_rs-0.1.43-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0dc68bdc7704d502d0193a9634764fffd2d618207b4a0260dbb32938881dad6c"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-macosx_11_0_x86_64.whl", hash = "sha256:49f5065a64a560e9d5ffaf5d30f074cf65a1196a2d9c554724bfff646a8697cc"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbc10526fc6abd606d337f93ca89ea52e95e390134d6d15620a3ad9d1a122ba5"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3548f9c870e20c1dcdf90820046803d3a9b487a12c0c5d0563031ae7677e64a8"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0d905bb57c3fca1e9b227ed233974a630ac912929091287800c82fdd6a51150a"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-win_amd64.whl", hash = "sha256:423915b4098d38112ed8e7b8fcac1eafacb7fb2ac11cf5c371d7853a85577d4f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc5a60fa6bbbfb8875258266b82251c4cc9279e3d6435eaaada0b2c033816b1a"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-macosx_13_0_x86_64.whl", hash = "sha256:d1cd3240fdb9560c5cc860ac15196a00b70935827018deacf01651fcd9bfa6e5"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fe0dcb7cad0edd1c9b1f7b0e3e6864cea1e13c3f8a6667de83c4721ff5626b4f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:792fb5be40d317e32b86e44f9a3bbbf4d96827b304877e678eaff0abca212b3e"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:079b849e0c289b6c8e7e93c027d9ee8083714ad07c5d19a1c0f7bd88efa9615f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-win_amd64.whl", hash = "sha256:3153335cafd180a6a308fef0d3808777fb5f7e13edd43e9ee344afd212e05516"}, + {file = "clvm_tools_rs-0.1.45.tar.gz", hash = "sha256:73848e0504e42bbd9627497c7307147cce9a04678936b5aec5c5a6be4b372c84"}, ] [[package]] @@ -3428,4 +3429,4 @@ upnp 
= ["miniupnpc"]

[metadata]
lock-version = "2.0"
python-versions = ">=3.9, <3.13"
-content-hash = "b3e7954f358d50320cf0aa1fe16dee1f17916c45e14377b838c671240e4962b8"
+content-hash = "fe87fd693372aa1c4e1596ce31c01cee1e50c5f7f24b9099c80410fde037568d"
diff --git a/pyproject.toml b/pyproject.toml
index dbdc231df899..92db76773d38 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,7 +51,7 @@ chiavdf = "1.1.8"  # timelord and vdf verification
 click = "8.1.7"  # For the CLI
 clvm = "0.9.10"
 clvm_tools = "0.4.9"  # Currying Program.to other conveniences
-clvm_tools_rs = "0.1.43"  # Rust implementation of clvm_tools' compiler
+clvm_tools_rs = "0.1.45"  # Rust implementation of clvm_tools' compiler
 colorama = "0.4.6"  # Colorizes terminal output
 colorlog = "6.8.2"  # Adds color to logs
 concurrent_log_handler = "0.9.25"  # Concurrently log and rotate logs

From 2df931bf879af21ccf428e8e9c3ab3d2382b6e3a Mon Sep 17 00:00:00 2001
From: Arvid Norberg
Date: Fri, 25 Oct 2024 17:38:23 +0200
Subject: [PATCH 64/69] [CHIA-1566] pipeline block validation in sync_from_fork_point() (#18703)

* make pre_validate_blocks_multiprocessing() return futures, rather than the
  awaited and joined results from the thread jobs. This is crucial to allow
  the validation jobs to run concurrently with the database interaction in
  the main thread
* Split up the add_block_batch() function into 3 parts, preparing them to be
  pipelined in long-sync
* pass AugmentedBlockchain into pre_validate_blocks_multiprocessing() and
  prevalidate_blocks(), so the augmented blockchain can outlive a single
  batch
* pipeline pre-validation in sync_from_fork_point()
* optimize the common case of _reconsider_peak(), to avoid one database
  lookup
* address review comments
* rename filter_blocks() -> skip_blocks(). It is only relevant at the start
  of the sync; add comment and assert
---
 .../blockchain/blockchain_test_utils.py | 7 +-
 chia/_tests/blockchain/test_blockchain.py | 64 ++--
 chia/_tests/core/full_node/test_full_node.py | 13 +-
 .../test_third_party_harvesters.py | 6 +-
 chia/_tests/util/full_sync.py | 2 +
 chia/_tests/wallet/sync/test_wallet_sync.py | 3 +
 chia/consensus/blockchain.py | 4 +
 chia/consensus/multiprocess_validation.py | 55 ++--
 chia/full_node/full_node.py | 307 +++++++++++++-----
 chia/simulator/add_blocks_in_batches.py | 2 +
 chia/simulator/full_node_simulator.py | 11 +-
 chia/util/augmented_chain.py | 5 +
 tools/test_full_sync.py | 3 +
 13 files changed, 332 insertions(+), 150 deletions(-)

diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py
index 8c2217e685a8..bf786e00f7f7 100644
--- a/chia/_tests/blockchain/blockchain_test_utils.py
+++ b/chia/_tests/blockchain/blockchain_test_utils.py
@@ -1,5 +1,6 @@
 from __future__ import annotations

+import asyncio
 from typing import Optional

 from chia_rs import BLSCache
@@ -10,6 +11,7 @@
 from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing
 from chia.types.full_block import FullBlock
 from chia.types.validation_state import ValidationState
+from chia.util.augmented_chain import AugmentedBlockchain
 from chia.util.errors import Err
 from chia.util.ints import uint32, uint64
@@ -76,15 +78,16 @@ async def _validate_and_add_block(
     else:
         # validate_signatures must be False in order to trigger add_block() to
         # validate the signature.
- pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), [block], blockchain.pool, {}, ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None results = pre_validation_results[0] if results.error is not None: diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 40ad49a5b2e6..51aab5cd9924 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -1,5 +1,6 @@ from __future__ import annotations +import asyncio import logging import random import time @@ -52,6 +53,7 @@ from chia.types.spend_bundle import SpendBundle from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.cpu import available_logical_cores from chia.util.errors import Err from chia.util.generator_tools import get_block_header @@ -1790,15 +1792,16 @@ async def test_pre_validation_fails_bad_blocks(self, empty_blockchain: Blockchai block_bad = recursive_replace( blocks[-1], "reward_chain_block.total_iters", blocks[-1].reward_chain_block.total_iters + 1 ) - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( empty_blockchain.constants, - empty_blockchain, + AugmentedBlockchain(empty_blockchain), [blocks[0], block_bad], empty_blockchain.pool, {}, ValidationState(ssi, difficulty, None), validate_signatures=True, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert res[0].error is None assert res[1].error is not None @@ -1817,15 +1820,16 @@ async def test_pre_validation( end_i = min(i + n_at_a_time, len(blocks)) blocks_to_validate = blocks[i:end_i] start_pv = time.time() - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( empty_blockchain.constants, - empty_blockchain, + AugmentedBlockchain(empty_blockchain), blocks_to_validate, empty_blockchain.pool, {}, ValidationState(ssi, difficulty, None), validate_signatures=True, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) end_pv = time.time() times_pv.append(end_pv - start_pv) assert res is not None @@ -1924,15 +1928,16 @@ async def test_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) block = blocks[-1] @@ -2050,15 +2055,16 @@ async def test_timelock_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + 
AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None block = blocks[-1] fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) @@ -2133,15 +2139,16 @@ async def test_aggsig_garbage( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) block = blocks[-1] @@ -2261,15 +2268,16 @@ async def test_ephemeral_timelock( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None block = blocks[-1] fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) @@ -2627,15 +2635,16 @@ async def test_cost_exceeds_max( ) )[1] assert err in [Err.BLOCK_COST_EXCEEDS_MAX] - results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None assert Err(results[0].error) == Err.BLOCK_COST_EXCEEDS_MAX @@ -3233,15 +3242,16 @@ async def test_invalid_agg_sig(self, empty_blockchain: Blockchain, bt: BlockTool # Bad signature also fails in prevalidation ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - preval_results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [last_block], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + preval_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert preval_results is not None assert preval_results[0].error == Err.BAD_AGGREGATE_SIGNATURE.value @@ -3352,15 +3362,16 @@ async def test_long_reorg( print(f"pre-validating {len(blocks)} blocks") ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), blocks, b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) for i, block in enumerate(blocks): if block.height != 0 and len(block.finished_sub_slots) > 0: if 
block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: @@ -3919,29 +3930,29 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> block1, block2 = b1, b2 counter += 1 - preval: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [block1], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) - + preval: list[PreValidationResult] = list(await asyncio.gather(*futures)) fork_info = ForkInfo(block1.height - 1, block1.height - 1, block1.prev_header_hash) _, err, _ = await b.add_block(block1, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None - preval = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [block2], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) - + preval = list(await asyncio.gather(*futures)) fork_info = ForkInfo(block2.height - 1, block2.height - 1, block2.prev_header_hash) _, err, _ = await b.add_block(block2, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None @@ -3967,15 +3978,16 @@ async def test_get_tx_peak(default_400_blocks: list[FullBlock], empty_blockchain test_blocks = default_400_blocks[:100] ssi = bc.constants.SUB_SLOT_ITERS_STARTING diff = bc.constants.DIFFICULTY_STARTING - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( bc.constants, - bc, + AugmentedBlockchain(bc), test_blocks, bc.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) last_tx_block_record = None for b, prevalidation_res in zip(test_blocks, res): diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 3dff7aeed53c..b1a357839b92 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -24,7 +24,7 @@ from chia._tests.util.setup_nodes import SimulatorsAndWalletsServices from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_custom_interval, time_out_messages from chia.consensus.block_body_validation import ForkInfo -from chia.consensus.multiprocess_validation import pre_validate_blocks_multiprocessing +from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing from chia.consensus.pot_iterations import is_overflow_block from chia.full_node.full_node import WalletUpdate from chia.full_node.full_node_api import FullNodeAPI @@ -63,6 +63,7 @@ from chia.types.spend_bundle import SpendBundle, estimate_fees from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.errors import ConsensusError, Err from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64, uint128 @@ -430,15 +431,16 @@ async def check_transaction_confirmed(transaction) -> bool: for reorg_block in reog_blocks[:r]: await _validate_and_add_block_no_error(blockchain, reorg_block, fork_info=fork_info) for i in range(1, height): - results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), 
all_blocks[:i], blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None for result in results: assert result.error is None @@ -448,15 +450,16 @@ async def check_transaction_confirmed(transaction) -> bool: for block in all_blocks[:r]: await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) for i in range(1, height): - results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), all_blocks[:i], blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None for result in results: assert result.error is None diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py index 6b102f81f152..334f753580b9 100644 --- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py +++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py @@ -41,6 +41,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import UnresolvedPeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.bech32m import decode_puzzle_hash from chia.util.hash import std_hash from chia.util.ints import uint8, uint32, uint64 @@ -437,15 +438,16 @@ async def add_test_blocks_into_full_node(blocks: list[FullBlock], full_node: Ful prev_ses_block = curr new_slot = len(block.finished_sub_slots) > 0 ssi, diff = get_next_sub_slot_iters_and_difficulty(full_node.constants, new_slot, prev_b, full_node.blockchain) - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( full_node.blockchain.constants, - full_node.blockchain, + AugmentedBlockchain(full_node.blockchain), blocks, full_node.blockchain.pool, {}, ValidationState(ssi, diff, prev_ses_block), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None and len(pre_validation_results) == len(blocks) for i in range(len(blocks)): block = blocks[i] diff --git a/chia/_tests/util/full_sync.py b/chia/_tests/util/full_sync.py index 2427b7749e2d..a8d48573290f 100644 --- a/chia/_tests/util/full_sync.py +++ b/chia/_tests/util/full_sync.py @@ -28,6 +28,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import load_config from chia.util.ints import uint16 @@ -212,6 +213,7 @@ async def run_sync_test( fork_height = block_batch[0].height - 1 header_hash = block_batch[0].prev_header_hash success, summary, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, header_hash), diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 76055d61da66..80ba712e9e1c 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -44,6 +44,7 @@ from chia.types.full_block import FullBlock from 
chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.hash import std_hash from chia.util.ints import uint32, uint64, uint128 from chia.wallet.nft_wallet.nft_wallet import NFTWallet @@ -364,6 +365,7 @@ async def test_long_sync_wallet( ) fork_height = blocks_reorg[-num_blocks - 10].height - 1 await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), blocks_reorg[-num_blocks - 10 : -1], PeerInfo("0.0.0.0", 0), ForkInfo(fork_height, fork_height, blocks_reorg[-num_blocks - 10].prev_header_hash), @@ -481,6 +483,7 @@ async def test_wallet_reorg_get_coinbase( full_node.constants, True, block_record, full_node.blockchain ) await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), blocks_reorg_2[-44:], PeerInfo("0.0.0.0", 0), ForkInfo(blocks_reorg_2[-45].height, blocks_reorg_2[-45].height, blocks_reorg_2[-45].header_hash), diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 9c3f31006b2d..97d65deaf05d 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -487,6 +487,10 @@ async def _reconsider_peak( if genesis: records_to_add = [block_record] + elif fork_info.block_hashes == [block_record.header_hash]: + # in the common case, we just add a block on top of the chain. Check + # for that here to avoid an unnecessary database lookup. + records_to_add = [block_record] else: records_to_add = await self.block_store.get_block_records_by_hash(fork_info.block_hashes) diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 2450135afb71..c4941d6f5836 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -5,7 +5,7 @@ import logging import time import traceback -from collections.abc import Sequence +from collections.abc import Awaitable, Sequence from concurrent.futures import Executor from dataclasses import dataclass from typing import Optional @@ -14,7 +14,7 @@ from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord -from chia.consensus.blockchain_interface import BlockRecordsProtocol, BlocksProtocol +from chia.consensus.blockchain_interface import BlockRecordsProtocol from chia.consensus.constants import ConsensusConstants from chia.consensus.full_block_to_block_record import block_to_block_record from chia.consensus.get_block_challenge import get_block_challenge @@ -132,7 +132,7 @@ def pre_validate_block( async def pre_validate_blocks_multiprocessing( constants: ConsensusConstants, - block_records: BlocksProtocol, + blockchain: AugmentedBlockchain, blocks: Sequence[FullBlock], pool: Executor, block_height_conds_map: dict[uint32, SpendBundleConditions], @@ -140,32 +140,40 @@ async def pre_validate_blocks_multiprocessing( *, wp_summaries: Optional[list[SubEpochSummary]] = None, validate_signatures: bool = True, -) -> list[PreValidationResult]: +) -> Sequence[Awaitable[PreValidationResult]]: """ This method must be called under the blockchain lock - If all the full blocks pass pre-validation, (only validates header), returns the list of required iters. - if any validation issue occurs, returns False. + The blocks passed to this function are submitted to be validated in the + executor passed in as "pool". The futures for those jobs are then returned. + When awaited, the return value is the PreValidationResult for each block. 
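    A typical call site (the pattern used by the test updates throughout
    this patch; variable names illustrative) awaits the returned futures
    with asyncio.gather:

        futures = await pre_validate_blocks_multiprocessing(
            constants, AugmentedBlockchain(chain), blocks, pool, {}, vs, validate_signatures=False
        )
        results: list[PreValidationResult] = list(await asyncio.gather(*futures))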
+ The PreValidationResult indicates whether the block was valid or not. Args: constants: - pool: - constants: - block_records: + blockchain: The blockchain object to validate these blocks with respect to. + It's an AugmentedBlockchain to allow for previous batches of blocks to + be included, even if they haven't been added to the underlying blockchain + database yet. The blocks passed in will be added/augmented onto this blockchain. + pool: The executor to submit the validation jobs to blocks: list of full blocks to validate (must be connected to current chain) - npc_results + vs: The ValidationState refers to the state for the first block in the batch. + This is an in-out parameter that will be updated to the validation state + for the next batch of blocks. It includes subslot iterators, difficulty and + the previous sub epoch summary (ses) block. + wp_summaries: + validate_signatures: """ prev_b: Optional[BlockRecord] = None + async def return_error(error_code: Err) -> PreValidationResult: + return PreValidationResult(uint16(error_code.value), None, None, False, uint32(0)) + if blocks[0].height > 0: - curr = block_records.try_block_record(blocks[0].prev_header_hash) + curr = blockchain.try_block_record(blocks[0].prev_header_hash) if curr is None: - return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_PREV_BLOCK_HASH)] prev_b = curr - # the agumented blockchain object will let us add temporary block records - # they won't actually be added to the underlying blockchain object - blockchain = AugmentedBlockchain(block_records) - futures = [] # Pool of workers to validate blocks concurrently @@ -186,7 +194,7 @@ async def pre_validate_blocks_multiprocessing( block.reward_chain_block.proof_of_space, constants, challenge, cc_sp_hash, height=block.height ) if q_str is None: - return [PreValidationResult(uint16(Err.INVALID_POSPACE.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_POSPACE)] required_iters: uint64 = calculate_iterations_quality( constants.DIFFICULTY_CONSTANT_FACTOR, @@ -207,13 +215,13 @@ async def pre_validate_blocks_multiprocessing( ) except ValueError: log.exception("block_to_block_record()") - return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_SUB_EPOCH_SUMMARY)] if block_rec.sub_epoch_summary_included is not None and wp_summaries is not None: next_ses = wp_summaries[int(block.height / constants.SUB_EPOCH_BLOCKS) - 1] if not block_rec.sub_epoch_summary_included.get_hash() == next_ses.get_hash(): log.error("sub_epoch_summary does not match wp sub_epoch_summary list") - return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_SUB_EPOCH_SUMMARY)] blockchain.add_extra_block(block, block_rec) # Temporarily add block to chain prev_b = block_rec @@ -227,11 +235,7 @@ async def pre_validate_blocks_multiprocessing( if block_generator is not None: previous_generators = block_generator.generator_refs except ValueError: - return [ - PreValidationResult( - uint16(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING.value), None, None, False, uint32(0) - ) - ] + return [return_error(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING)] futures.append( asyncio.get_running_loop().run_in_executor( @@ -250,5 +254,4 @@ async def pre_validate_blocks_multiprocessing( if block_rec.sub_epoch_summary_included is not None: vs.prev_ses_block = 
block_rec - # Collect all results into one flat list - return list(await asyncio.gather(*futures)) + return futures diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 29fdfaeab1ee..e72689c129a2 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -10,7 +10,7 @@ import sqlite3 import time import traceback -from collections.abc import AsyncIterator, Awaitable +from collections.abc import AsyncIterator, Awaitable, Sequence from multiprocessing.context import BaseContext from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final @@ -76,6 +76,7 @@ from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState from chia.types.weight_proof import WeightProof +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.bech32m import encode_puzzle_hash from chia.util.check_fork_next_block import check_fork_next_block from chia.util.config import process_config_start_method @@ -85,7 +86,6 @@ from chia.util.errors import ConsensusError, Err, TimestampError, ValidationError from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.limited_semaphore import LimitedSemaphore -from chia.util.log_exceptions import log_exceptions from chia.util.path import path_from_root from chia.util.profiler import enable_profiler, mem_profile_task, profile_task from chia.util.safe_cancel_task import cancel_task_safe @@ -604,7 +604,7 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t ) vs = ValidationState(ssi, diff, None) success, state_change_summary, err = await self.add_block_batch( - response.blocks, peer_info, fork_info, vs + AugmentedBlockchain(self.blockchain), response.blocks, peer_info, fork_info, vs ) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") @@ -1056,7 +1056,6 @@ async def sync_from_fork_point( peak_hash: bytes32, summaries: list[SubEpochSummary], ) -> None: - buffer_size = 4 self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}") peers_with_peak: list[WSChiaConnection] = self.get_peers_with_peak(peak_hash) fork_point_height = await check_fork_next_block( @@ -1081,7 +1080,17 @@ async def sync_from_fork_point( # normally "fork_point" or "fork_height" refers to the first common # block between the main chain and the fork. Here "fork_point_height" # seems to refer to the first diverging block - fork_info: Optional[ForkInfo] = None + # in case we're validating a reorg fork (i.e. not extending the + # main chain), we need to record the coin set from that fork in + # fork_info. 
Otherwise validation is very expensive, especially + # for deep reorgs + if fork_point_height > 0: + fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) + assert fork_hash is not None + else: + fork_hash = self.constants.GENESIS_CHALLENGE + fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) + if fork_point_height == 0: ssi = self.constants.SUB_SLOT_ITERS_STARTING diff = self.constants.DIFFICULTY_STARTING @@ -1092,11 +1101,16 @@ async def sync_from_fork_point( prev_b = await self.blockchain.get_full_block(prev_b_hash) assert prev_b is not None ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None) - vs = ValidationState(ssi, diff, prev_ses_block) - async def fetch_block_batches( - batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] - ) -> None: + # we need an augmented blockchain to validate blocks in batches. The + # batch must be treated as if it's part of the chain to validate the + # blocks in it. We also need them to keep appearing as if they're part + # of the chain when pipelining the validation of blocks. We start + # validating the next batch while still adding the first batch to the + # chain. + blockchain = AugmentedBlockchain(self.blockchain) + + async def fetch_blocks(output_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]]) -> None: start_height, end_height = 0, 0 new_peers_with_peak: list[WSChiaConnection] = peers_with_peak[:] try: @@ -1110,11 +1124,22 @@ async def fetch_block_batches( for peer in random.sample(new_peers_with_peak, len(new_peers_with_peak)): if peer.closed: continue + start = time.monotonic() response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=30) + end = time.monotonic() + if end - start > 5: + self.log.info(f"sync pipeline, peer took {end-start:0.2f} to respond to request_blocks") if response is None: await peer.close() elif isinstance(response, RespondBlocks): - await batch_queue.put((peer, response.blocks)) + start = time.monotonic() + await output_queue.put((peer, response.blocks)) + end = time.monotonic() + if end - start > 1: + self.log.info( + f"sync pipeline back-pressure. stalled {end-start:0.2f} " + "seconds on prevalidate block" + ) fetched = True break if fetched is False: @@ -1127,52 +1152,102 @@ async def fetch_block_batches( self.log.error(f"Exception fetching {start_height} to {end_height} from peer {e}") finally: # finished signal with None - await batch_queue.put(None) + await output_queue.put(None) + + async def validate_blocks( + input_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]], + output_queue: asyncio.Queue[ + Optional[ + tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]] + ] + ], + ) -> None: + nonlocal blockchain + nonlocal fork_info + first_batch = True + + vs = ValidationState(ssi, diff, prev_ses_block) - async def validate_block_batches( - inner_batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] + try: + while True: + res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await input_queue.get() + if res is None: + self.log.debug("done fetching blocks") + return None + peer, blocks = res + + # skip_blocks is only relevant at the start of the sync, + # to skip blocks we already have in the database (and have + # been validated). Once we start validating blocks, we + # shouldn't be skipping any. 
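                    # For orientation, the pipeline this function sits in has three
                    # stages joined by bounded queues; the queue maxsize is what
                    # produces the back-pressure logged in these stages. A sketch
                    # using the names defined below in this patch:
                    #
                    #     block_queue: asyncio.Queue = asyncio.Queue(maxsize=10)
                    #     validation_queue: asyncio.Queue = asyncio.Queue(maxsize=10)
                    #     await asyncio.gather(
                    #         fetch_blocks(block_queue),                       # network -> blocks
                    #         validate_blocks(block_queue, validation_queue),  # blocks -> futures
                    #         ingest_blocks(validation_queue),                 # futures -> chain
                    #     )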
+ blocks_to_validate = await self.skip_blocks(blockchain, blocks, fork_info, vs) + assert first_batch or len(blocks_to_validate) == len(blocks) + next_validation_state = copy.copy(vs) + + if len(blocks_to_validate) == 0: + continue + + first_batch = False + + futures: list[Awaitable[PreValidationResult]] = [] + for block in blocks_to_validate: + futures.extend( + await self.prevalidate_blocks( + blockchain, + [block], + peer.peer_info, + vs, + summaries, + ) + ) + start = time.monotonic() + await output_queue.put((peer, next_validation_state, list(futures), blocks_to_validate)) + end = time.monotonic() + if end - start > 1: + self.log.info(f"sync pipeline back-pressure. stalled {end-start:0.2f} seconds on add_block()") + except Exception: + self.log.exception("Exception validating") + finally: + # finished signal with None + await output_queue.put(None) + + async def ingest_blocks( + input_queue: asyncio.Queue[ + Optional[ + tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]] + ] + ], ) -> None: nonlocal fork_info block_rate = 0 block_rate_time = time.monotonic() block_rate_height = -1 while True: - res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await inner_batch_queue.get() + res = await input_queue.get() if res is None: - self.log.debug("done fetching blocks") + self.log.debug("done validating blocks") return None - peer, blocks = res + peer, vs, futures, blocks = res start_height = blocks[0].height end_height = blocks[-1].height if block_rate_height == -1: block_rate_height = start_height - # in case we're validating a reorg fork (i.e. not extending the - # main chain), we need to record the coin set from that fork in - # fork_info. Otherwise validation is very expensive, especially - # for deep reorgs - peak: Optional[BlockRecord] - if fork_info is None: - if fork_point_height > 0: - fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) - assert fork_hash is not None - else: - fork_hash = self.constants.GENESIS_CHALLENGE - fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) - + pre_validation_results = list(await asyncio.gather(*futures)) # The ValidationState object (vs) is an in-out parameter. the add_block_batch() # call will update it - success, state_change_summary, err = await self.add_block_batch( + state_change_summary, err = await self.add_prevalidated_blocks( + blockchain, blocks, - peer.get_peer_logging(), + pre_validation_results, fork_info, + peer.peer_info, vs, - summaries, ) - if success is False: + if err is not None: await peer.close(600) - raise ValueError(f"Failed to validate block batch {start_height} to {end_height}") + raise ValueError(f"Failed to validate block batch {start_height} to {end_height}: {err}") if end_height - block_rate_height > 100: now = time.monotonic() block_rate = int((end_height - block_rate_height) // (now - block_rate_time)) @@ -1180,7 +1255,7 @@ async def validate_block_batches( block_rate_height = end_height self.log.info(f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s)") - peak = self.blockchain.get_peak() + peak: Optional[BlockRecord] = self.blockchain.get_peak() if state_change_summary is not None: assert peak is not None # Hints must be added to the DB. The other post-processing tasks are not required when syncing @@ -1197,17 +1272,32 @@ async def validate_block_batches( # height, in that case. 
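                    # A sketch of the kind of trimming clean_block_record is assumed
                    # to do here (hypothetical implementation, shown only to make
                    # clear why the in-memory cache stays bounded during long syncs):
                    #
                    #     def clean_block_record(self, cutoff: int) -> None:
                    #         # drop cached, already-persisted records at or below cutoff
                    #         for hh, rec in list(self._block_records.items()):
                    #             if rec.height <= cutoff:
                    #                 del self._block_records[hh]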
self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE) - batch_queue_input: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue( - maxsize=buffer_size - ) - fetch_task = asyncio.Task(fetch_block_batches(batch_queue_input)) - validate_task = asyncio.Task(validate_block_batches(batch_queue_input)) + block_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue(maxsize=10) + validation_queue: asyncio.Queue[ + Optional[tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]] + ] = asyncio.Queue(maxsize=10) + + fetch_task = asyncio.create_task(fetch_blocks(block_queue)) + validate_task = asyncio.create_task(validate_blocks(block_queue, validation_queue)) + ingest_task = asyncio.create_task(ingest_blocks(validation_queue)) try: - with log_exceptions(log=self.log, message="sync from fork point failed"): - await asyncio.gather(fetch_task, validate_task) + await asyncio.gather(fetch_task, validate_task, ingest_task) except Exception: - assert validate_task.done() - fetch_task.cancel() # no need to cancel validate_task, if we end up here validate_task is already done + self.log.exception("sync from fork point failed") + finally: + cancel_task_safe(validate_task, self.log) + cancel_task_safe(fetch_task) + cancel_task_safe(ingest_task) + + # we still need to await all the pending futures of the + # prevalidation steps posted to the thread pool + while not validation_queue.empty(): + result = validation_queue.get_nowait() + if result is None: + continue + + _, _, futures, _ = result + await asyncio.gather(*futures) def get_peers_with_peak(self, peak_hash: bytes32) -> list[WSChiaConnection]: peer_ids: set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash]) @@ -1265,6 +1355,7 @@ async def update_wallets(self, wallet_update: WalletUpdate) -> None: async def add_block_batch( self, + blockchain: AugmentedBlockchain, all_blocks: list[FullBlock], peer_info: PeerInfo, fork_info: ForkInfo, @@ -1274,15 +1365,56 @@ async def add_block_batch( # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced + pre_validate_start = time.monotonic() + blockchain = AugmentedBlockchain(self.blockchain) + blocks_to_validate = await self.skip_blocks(blockchain, all_blocks, fork_info, vs) + + if len(blocks_to_validate) == 0: + return True, None, None + + futures = await self.prevalidate_blocks( + blockchain, + blocks_to_validate, + peer_info, + copy.copy(vs), + wp_summaries, + ) + pre_validation_results = list(await asyncio.gather(*futures)) + + agg_state_change_summary, err = await self.add_prevalidated_blocks( + blockchain, + blocks_to_validate, + pre_validation_results, + fork_info, + peer_info, + vs, + ) + + if agg_state_change_summary is not None: + self._state_changed("new_peak") + self.log.debug( + f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, " + f"advanced: True" + ) + return err is None, agg_state_change_summary, err + + async def skip_blocks( + self, + blockchain: AugmentedBlockchain, + all_blocks: list[FullBlock], + fork_info: ForkInfo, + vs: ValidationState, # in-out parameter + ) -> list[FullBlock]: + blocks_to_validate: list[FullBlock] = [] for i, block in enumerate(all_blocks): header_hash = block.header_hash - block_rec = await self.blockchain.get_block_record_from_db(header_hash) + block_rec = await 
blockchain.get_block_record_from_db(header_hash) if block_rec is None: blocks_to_validate = all_blocks[i:] break else: - self.blockchain.add_block_record(block_rec) + blockchain.add_block_record(block_rec) if block_rec.sub_epoch_summary_included: # already validated block, update sub slot iters, difficulty and prev sub epoch summary vs.prev_ses_block = block_rec @@ -1298,7 +1430,7 @@ async def add_block_batch( # we have already validated this block once, no need to do it again. # however, if this block is not part of the main chain, we need to # update the fork context with its additions and removals - if self.blockchain.height_to_hash(block.height) == header_hash: + if blockchain.height_to_hash(block.height) == header_hash: # we're on the main chain, just fast-forward the fork height fork_info.reset(block.height, header_hash) else: @@ -1307,50 +1439,57 @@ async def add_block_batch( # removals in fork_info. await self.blockchain.advance_fork_info(block, fork_info) await self.blockchain.run_single_block(block, fork_info) + return blocks_to_validate - if len(blocks_to_validate) == 0: - return True, None, None + async def prevalidate_blocks( + self, + blockchain: AugmentedBlockchain, + blocks_to_validate: list[FullBlock], + peer_info: PeerInfo, + vs: ValidationState, + wp_summaries: Optional[list[SubEpochSummary]] = None, + ) -> Sequence[Awaitable[PreValidationResult]]: + """ + This is a thin wrapper over pre_validate_blocks_multiprocessing(). + Args: + blockchain: + blocks_to_validate: + peer_info: + vs: The ValidationState for the first block in the batch. This is an in-out + parameter. It will be updated to be the validation state for the next + batch of blocks. + wp_summaries: + """ # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions # for these blocks (unlike during normal operation where we validate one at a time) # We have to copy the ValidationState object to preserve it for the add_block() # call below. pre_validate_blocks_multiprocessing() will update the # object we pass in. 
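        # With this change the copy happens at the call sites instead: they
        # snapshot the in-out state before it is mutated and hand the snapshot
        # downstream, letting vs advance to the next batch's starting state.
        # A sketch mirroring validate_blocks() above:
        #
        #     snapshot = copy.copy(vs)            # state at the start of this batch
        #     futures = await self.prevalidate_blocks(chain, batch, peer, vs)
        #     await output_queue.put((peer, snapshot, futures, batch))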
-        pre_validate_start = time.monotonic()
-        pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing(
-            self.blockchain.constants,
-            self.blockchain,
+        return await pre_validate_blocks_multiprocessing(
+            self.constants,
+            blockchain,
             blocks_to_validate,
             self.blockchain.pool,
             {},
-            copy.copy(vs),
+            vs,
             wp_summaries=wp_summaries,
             validate_signatures=True,
         )
-        pre_validate_end = time.monotonic()
-        pre_validate_time = pre_validate_end - pre_validate_start
-
-        self.log.log(
-            logging.WARNING if pre_validate_time > 10 else logging.DEBUG,
-            f"Block pre-validation: {pre_validate_end - pre_validate_start:0.2f}s "
-            f"CLVM: {sum(pvr.timing/1000.0 for pvr in pre_validation_results):0.2f}s "
-            f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})",
-        )
-
-        for i, block in enumerate(blocks_to_validate):
-            if pre_validation_results[i].error is not None:
-                self.log.error(
-                    f"Invalid block from peer: {peer_info} height {block.height} {Err(pre_validation_results[i].error)}"
-                )
-                return (
-                    False,
-                    None,
-                    Err(pre_validation_results[i].error),
-                )
+    async def add_prevalidated_blocks(
+        self,
+        blockchain: AugmentedBlockchain,
+        blocks_to_validate: list[FullBlock],
+        pre_validation_results: list[PreValidationResult],
+        fork_info: ForkInfo,
+        peer_info: PeerInfo,
+        vs: ValidationState,  # in-out parameter
+    ) -> tuple[Optional[StateChangeSummary], Optional[Err]]:
         agg_state_change_summary: Optional[StateChangeSummary] = None
         block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash)
         for i, block in enumerate(blocks_to_validate):
+            header_hash = block.header_hash
             assert vs.prev_ses_block is None or vs.prev_ses_block.height < block.height
             assert pre_validation_results[i].required_iters is not None
             state_change_summary: Optional[StateChangeSummary]
@@ -1373,12 +1512,13 @@
             result, error, state_change_summary = await self.blockchain.add_block(
                 block, pre_validation_results[i], None, vs.current_ssi, fork_info, prev_ses_block=vs.prev_ses_block
             )
+            if error is None:
+                blockchain.remove_extra_block(header_hash)
             if result == AddBlockResult.NEW_PEAK:
                 # since this block just added a new peak, we don't need any
                 # fork history from fork_info anymore
-                if fork_info is not None:
-                    fork_info.reset(block.height, block.header_hash)
+                fork_info.reset(block.height, header_hash)
                 assert state_change_summary is not None
                 # Since all blocks are contiguous, we can simply append the rollback changes and npc results
                 if agg_state_change_summary is None:
@@ -1397,8 +1537,8 @@
             elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK:
                 if error is not None:
                     self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ")
-                return False, agg_state_change_summary, error
-            block_record = self.blockchain.block_record(block.header_hash)
+                return agg_state_change_summary, error
+            block_record = blockchain.block_record(header_hash)
             assert block_record is not None
             if block_record.sub_epoch_summary_included is not None:
                 vs.prev_ses_block = block_record
@@ -1406,11 +1546,7 @@
                 await self.weight_proof_handler.create_prev_sub_epoch_segments()
         if agg_state_change_summary is not None:
             self._state_changed("new_peak")
-        self.log.debug(
-            f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
-            f"advanced: True"
-        )
-        return True, agg_state_change_summary, None
+        return
agg_state_change_summary, None async def get_sub_slot_iters_difficulty_ses_block( self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64] @@ -1843,15 +1979,16 @@ async def add_block( prev_ses_block = curr new_slot = len(block.finished_sub_slots) > 0 ssi, diff = get_next_sub_slot_iters_and_difficulty(self.constants, new_slot, prev_b, self.blockchain) - pre_validation_results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( self.blockchain.constants, - self.blockchain, + AugmentedBlockchain(self.blockchain), [block], self.blockchain.pool, block_height_conds_map, ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) + pre_validation_results = list(await asyncio.gather(*futures)) added: Optional[AddBlockResult] = None pre_validation_time = time.monotonic() - validation_start try: diff --git a/chia/simulator/add_blocks_in_batches.py b/chia/simulator/add_blocks_in_batches.py index 965e40f28186..7919fbb59098 100644 --- a/chia/simulator/add_blocks_in_batches.py +++ b/chia/simulator/add_blocks_in_batches.py @@ -9,6 +9,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.batches import to_batches @@ -39,6 +40,7 @@ async def add_blocks_in_batches( print(f"main chain: {b.height:4} weight: {b.weight}") # vs is updated by the call to add_block_batch() success, state_change_summary, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch.entries, PeerInfo("0.0.0.0", 0), fork_info, diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index ef42d7b0a15c..6d1495906cef 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -26,6 +26,7 @@ from chia.types.full_block import FullBlock from chia.types.spend_bundle import SpendBundle from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import lock_and_load_config, save_config from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.timing import adjusted_timeout, backoff_times @@ -174,15 +175,16 @@ async def farm_new_transaction_block( current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, - self.full_node.blockchain, + AugmentedBlockchain(self.full_node.blockchain), [genesis], self.full_node.blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( @@ -237,15 +239,16 @@ async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_tim current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, - 
self.full_node.blockchain, + AugmentedBlockchain(self.full_node.blockchain), [genesis], self.full_node.blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( diff --git a/chia/util/augmented_chain.py b/chia/util/augmented_chain.py index fd4fde1fbdb7..073a8d427456 100644 --- a/chia/util/augmented_chain.py +++ b/chia/util/augmented_chain.py @@ -45,6 +45,11 @@ def add_extra_block(self, block: FullBlock, block_record: BlockRecord) -> None: self._extra_blocks[block_record.header_hash] = (block, block_record) self._height_to_hash[block_record.height] = block_record.header_hash + def remove_extra_block(self, hh: bytes32) -> None: + if hh in self._extra_blocks: + block_record = self._extra_blocks.pop(hh)[1] + del self._height_to_hash[block_record.height] + # BlocksProtocol async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py index 4701ba8d3ab1..2af2ca12880a 100755 --- a/tools/test_full_sync.py +++ b/tools/test_full_sync.py @@ -21,6 +21,7 @@ from chia.server.ws_connection import WSChiaConnection from chia.types.full_block import FullBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import load_config @@ -165,6 +166,7 @@ async def run_sync_checkpoint( header_hash = block_batch[0].prev_header_hash success, _, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, header_hash), @@ -188,6 +190,7 @@ async def run_sync_checkpoint( fork_height = block_batch[0].height - 1 fork_header_hash = block_batch[0].prev_header_hash success, _, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, fork_header_hash), From e2c63db676dad321b8cf2a4e8b67b06f669773e3 Mon Sep 17 00:00:00 2001 From: Kyle Altendorf Date: Fri, 25 Oct 2024 15:02:26 -0400 Subject: [PATCH 65/69] remove unnecessary use of `.__members__` on `ConditionOpcode` iteration (#18767) --- chia/_tests/clvm/test_condition_codes.py | 4 ++-- chia/wallet/util/debug_spend_bundle.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/chia/_tests/clvm/test_condition_codes.py b/chia/_tests/clvm/test_condition_codes.py index 8d20e4d387c4..af75f70f0b30 100644 --- a/chia/_tests/clvm/test_condition_codes.py +++ b/chia/_tests/clvm/test_condition_codes.py @@ -9,5 +9,5 @@ def test_condition_codes_is_complete() -> None: condition_codes_path = importlib_resources.files("chia.wallet.puzzles").joinpath("condition_codes.clib") contents = condition_codes_path.read_text(encoding="utf-8") - for name, value in ConditionOpcode.__members__.items(): - assert f"(defconstant {name} {int_from_bytes(value)})" in contents + for opcode in ConditionOpcode: + assert f"(defconstant {opcode.name} {int_from_bytes(opcode.value)})" in contents diff --git a/chia/wallet/util/debug_spend_bundle.py b/chia/wallet/util/debug_spend_bundle.py index a6a836e8811a..7247ecca0a4d 100644 --- a/chia/wallet/util/debug_spend_bundle.py +++ b/chia/wallet/util/debug_spend_bundle.py @@ -12,7 +12,7 @@ from chia.util.hash import std_hash from 
chia.wallet.uncurried_puzzle import UncurriedPuzzle -CONDITIONS = {k: bytes(v)[0] for k, v in ConditionOpcode.__members__.items()} # pylint: disable=E1101 +CONDITIONS = {opcode.name: opcode.value[0] for opcode in ConditionOpcode} KFA = {v: k for k, v in CONDITIONS.items()} From 0354e6e1d390002216581ff42626c734b6c71b73 Mon Sep 17 00:00:00 2001 From: wjblanke Date: Fri, 25 Oct 2024 14:55:33 -0700 Subject: [PATCH 66/69] Keep track of all long sync task references (#18769) * keep track of all long sync task references * List * flake8 * flake8 * shorten comments... ooof indents * comment --- chia/full_node/full_node.py | 25 +++++++++++++++++++------ 1 file changed, 19 insertions(+), 6 deletions(-) diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index e72689c129a2..f5531e8d7b90 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -142,7 +142,7 @@ class FullNode: subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions) _transaction_queue_task: Optional[asyncio.Task[None]] = None simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None - _sync_task: Optional[asyncio.Task[None]] = None + _sync_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list) _transaction_queue: Optional[TransactionQueue] = None _compact_vdf_sem: Optional[LimitedSemaphore] = None _new_peak_sem: Optional[LimitedSemaphore] = None @@ -358,15 +358,21 @@ async def manage(self) -> AsyncIterator[None]: if self._transaction_queue_task is not None: self._transaction_queue_task.cancel() cancel_task_safe(task=self.wallet_sync_task, log=self.log) - cancel_task_safe(task=self._sync_task, log=self.log) + for one_sync_task in self._sync_task_list: + if not one_sync_task.done(): + cancel_task_safe(task=one_sync_task, log=self.log) for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()): cancel_task_safe(task, self.log) if self._init_weight_proof is not None: await asyncio.wait([self._init_weight_proof]) - if self._sync_task is not None: - with contextlib.suppress(asyncio.CancelledError): - await self._sync_task + for one_sync_task in self._sync_task_list: + if one_sync_task.done(): + self.log.info(f"Long sync task {one_sync_task.get_name()} done") + else: + with contextlib.suppress(asyncio.CancelledError): + self.log.info(f"Awaiting long sync task {one_sync_task.get_name()}") + await one_sync_task @property def block_store(self) -> BlockStore: @@ -770,9 +776,16 @@ async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnec if await self.short_sync_batch(peer, uint32(max(curr_peak_height - 6, 0)), request.height): return None + # Clean up task reference list (used to prevent gc from killing running tasks) + for oldtask in self._sync_task_list[:]: + if oldtask.done(): + self._sync_task_list.remove(oldtask) + # This is the either the case where we were not able to sync successfully (for example, due to the fork # point being in the past), or we are very far behind. Performs a long sync. - self._sync_task = asyncio.create_task(self._sync()) + # Multiple tasks may be created here. If we don't save all handles, a task could enter a sync object + # and be cleaned up by the GC, corrupting the sync object and possibly not allowing anything else in. 
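        # Background: the event loop keeps only a weak reference to tasks made
        # with asyncio.create_task(), so a fire-and-forget task with no strong
        # reference can be garbage-collected mid-flight. A common alternative
        # shape (a sketch; this patch prunes a list instead) is a set plus a
        # done-callback:
        #
        #     background_tasks: set[asyncio.Task[None]] = set()
        #     task = asyncio.create_task(self._sync())
        #     background_tasks.add(task)
        #     task.add_done_callback(background_tasks.discard)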
+ self._sync_task_list.append(asyncio.create_task(self._sync())) async def send_peak_to_timelords( self, peak_block: Optional[FullBlock] = None, peer: Optional[WSChiaConnection] = None From 6084112d485fd6061a238b9c1a4882d845280feb Mon Sep 17 00:00:00 2001 From: Matt Hauff Date: Fri, 25 Oct 2024 15:55:37 -0700 Subject: [PATCH 67/69] [CHIA-1553] Replace pylint with ruff (#18759) * Replace pylint with ruff * poetry * remove pylint * Address comments by @altendky * Undo formatting * actually undo formatting --- .github/CODEOWNERS | 1 - .github/workflows/dependency-review.yml | 2 +- .github/workflows/upload-pypi-source.yml | 2 - .pre-commit-config.yaml | 7 + CONTRIBUTING.md | 2 +- benchmarks/block_ref.py | 1 - benchmarks/streamable.py | 2 +- chia/_tests/build-init-files.py | 2 +- chia/_tests/cmds/cmd_test_utils.py | 2 +- chia/_tests/cmds/test_show.py | 2 +- chia/_tests/cmds/wallet/test_coins.py | 6 +- chia/_tests/cmds/wallet/test_dao.py | 8 +- chia/_tests/cmds/wallet/test_did.py | 18 +- chia/_tests/cmds/wallet/test_nft.py | 16 +- chia/_tests/cmds/wallet/test_notifications.py | 6 +- chia/_tests/cmds/wallet/test_vcs.py | 14 +- chia/_tests/cmds/wallet/test_wallet.py | 30 +- chia/_tests/process_junit.py | 1 - chia/_tests/util/misc.py | 2 +- chia/_tests/util/time_out_assert.py | 2 +- chia/_tests/wallet/rpc/test_wallet_rpc.py | 4 +- .../wallet/test_wallet_state_manager.py | 2 +- chia/cmds/chia.py | 2 +- chia/cmds/cmd_classes.py | 6 +- chia/cmds/signer.py | 2 +- chia/daemon/windows_signal.py | 4 +- chia/data_layer/data_store.py | 3 +- chia/full_node/full_node.py | 2 +- chia/rpc/data_layer_rpc_util.py | 4 +- chia/rpc/wallet_request_types.py | 2 - chia/rpc/wallet_rpc_api.py | 2 +- chia/server/chia_policy.py | 8 +- chia/util/db_wrapper.py | 3 +- chia/util/dump_keyring.py | 2 +- chia/util/task_timing.py | 4 +- chia/wallet/dao_wallet/dao_wallet.py | 1 - chia/wallet/nft_wallet/nft_puzzles.py | 6 +- chia/wallet/util/blind_signer_tl.py | 3 - chia/wallet/wallet.py | 2 +- chia/wallet/wallet_protocol.py | 4 +- poetry.lock | 100 +-- pylintrc | 583 ------------------ pyproject.toml | 64 +- tools/analyze-chain.py | 1 - tools/analyze_memory_profile.py | 2 +- tools/cpu_utilization.py | 1 - tools/generate_chain.py | 1 - tools/run_block.py | 2 +- tools/test_full_sync.py | 1 - 49 files changed, 184 insertions(+), 763 deletions(-) delete mode 100644 pylintrc diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 85c52a45e3c7..502ac003c694 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,5 +1,4 @@ * @Chia-Network/required-reviewers /.github/**/* @Chia-Network/actions-reviewers /PRETTY_GOOD_PRACTICES.md @altendky @Chia-Network/required-reviewers -/pylintrc @altendky @Chia-Network/required-reviewers /tests/ether.py @altendky @Chia-Network/required-reviewers diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 53a3bbdd86e2..53b8c1209bef 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -21,5 +21,5 @@ jobs: - name: "Dependency Review" uses: actions/dependency-review-action@v4 with: - allow-dependencies-licenses: pkg:pypi/pylint, pkg:pypi/pyinstaller + allow-dependencies-licenses: pkg:pypi/pyinstaller deny-licenses: AGPL-1.0-only, AGPL-1.0-or-later, AGPL-1.0-or-later, AGPL-3.0-or-later, GPL-1.0-only, GPL-1.0-or-later, GPL-2.0-only, GPL-2.0-or-later, GPL-3.0-only, GPL-3.0-or-later diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index fa0f3c89de4f..7baee8f1b3d1 
100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -122,8 +122,6 @@ jobs: command: black --check --diff . - name: flake8 command: flake8 benchmarks build_scripts chia tools *.py - - name: pylint - command: pylint benchmarks build_scripts chia tools *.py - name: generated protocol tests command: | python3 -m chia._tests.util.build_network_protocol_files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 063bfe9dc4e0..ebee9872709c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -106,3 +106,10 @@ repos: entry: ./activated.py flake8 language: system types: [python] + - repo: local + hooks: + - id: ruff + name: Ruff + entry: ./activated.py ruff check --fix + language: system + types: [python] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5d6e871094f3..40a501a3f153 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,7 +65,7 @@ The [black library](https://black.readthedocs.io/en/stable/) is used as an autom The [flake8 library](https://readthedocs.org/projects/flake8/) helps ensure consistent style. The [Mypy library](https://mypy.readthedocs.io/en/stable/) is very useful for ensuring objects are of the correct type, so try to always add the type of the return value, and the type of local variables. The [isort library](https://isort.readthedocs.io) is used to sort, group and validate imports in all python files. -The [pylint library](https://pylint.pycqa.org/en/stable/) is used to further lint all python files. +The [Ruff library](https://docs.astral.sh) is used to further lint all of the python files If you want verbose logging for tests, edit the `tests/pytest.ini` file. diff --git a/benchmarks/block_ref.py b/benchmarks/block_ref.py index 640c668773c3..1b7b35520e69 100644 --- a/benchmarks/block_ref.py +++ b/benchmarks/block_ref.py @@ -98,5 +98,4 @@ def entry_point(db_path: Path) -> None: if __name__ == "__main__": - # pylint: disable = no-value-for-parameter entry_point() diff --git a/benchmarks/streamable.py b/benchmarks/streamable.py index 386da56a6917..366310c2d4d9 100644 --- a/benchmarks/streamable.py +++ b/benchmarks/streamable.py @@ -304,4 +304,4 @@ def get_bench_results() -> BenchmarkResults: if __name__ == "__main__": - run() # pylint: disable = no-value-for-parameter + run() diff --git a/chia/_tests/build-init-files.py b/chia/_tests/build-init-files.py index 67b5840eec04..38f3269d4009 100755 --- a/chia/_tests/build-init-files.py +++ b/chia/_tests/build-init-files.py @@ -89,4 +89,4 @@ def command(verbose, root_str): raise click.ClickException("At least one __init__.py created or not a regular file") -command() # pylint: disable=no-value-for-parameter +command() diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 4a4acc7983eb..7ddf2571d7b2 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -440,7 +440,7 @@ def run_cli_command(capsys: object, chia_root: Path, command_list: list[str]) -> argv_temp = sys.argv try: sys.argv = ["chia", "--root-path", str(chia_root)] + command_list - chia_cli() # pylint: disable=no-value-for-parameter + chia_cli() except SystemExit as e: if e.code != 0: exited_cleanly = False diff --git a/chia/_tests/cmds/test_show.py b/chia/_tests/cmds/test_show.py index f2fe27545f20..3916ce8f6dbc 100644 --- a/chia/_tests/cmds/test_show.py +++ b/chia/_tests/cmds/test_show.py @@ -79,7 +79,7 @@ async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: return full_block 
-RPC_CLIENT_TO_USE = ShowFullNodeRpcClient() # pylint: disable=no-value-for-parameter +RPC_CLIENT_TO_USE = ShowFullNodeRpcClient() def test_chia_show(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index ceca5f538753..4839fb900fcf 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -25,7 +25,7 @@ def test_coins_get_info(capsys: object, get_test_cli_clients: tuple[TestRpcClien # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "coins", "list", FINGERPRINT_ARG, "-i1", "-u"] # these are various things that should be in the output @@ -68,7 +68,7 @@ async def combine_coins( self.add_to_log("combine_coins", (args, tx_config, timelock_info)) return CombineCoinsResponse([STD_UTX], [STD_TX]) - inst_rpc_client = CoinsCombineRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CoinsCombineRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client assert sum(coin.amount for coin in STD_TX.removals) < 500_000_000_000 command_args = [ @@ -173,7 +173,7 @@ async def get_coin_records_by_names( else: return [] - inst_rpc_client = CoinsSplitRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CoinsSplitRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_coin_id = test_coin.name() command_args = [ diff --git a/chia/_tests/cmds/wallet/test_dao.py b/chia/_tests/cmds/wallet/test_dao.py index 6f6292459401..68a2e6a2e6d3 100644 --- a/chia/_tests/cmds/wallet/test_dao.py +++ b/chia/_tests/cmds/wallet/test_dao.py @@ -66,7 +66,7 @@ async def create_new_dao_wallet( } ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "dao", @@ -175,7 +175,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: valid_times=parse_timelock_info(tuple()), ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client get_id_args = ["dao", "get_id", FINGERPRINT_ARG, "-i 2"] @@ -345,7 +345,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: ) # List all proposals - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client list_args = ["dao", "list_proposals", FINGERPRINT_ARG, "-i 2"] # these are various things that should be in the output @@ -549,7 +549,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: valid_times=parse_timelock_info(tuple()), ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client lockup_args = ["dao", "lockup_coins", FINGERPRINT_ARG, "-i 2", "-a", "1000", "-m 0.1", "--reuse"] lockup_asserts = ["Transaction submitted to nodes"] diff --git a/chia/_tests/cmds/wallet/test_did.py b/chia/_tests/cmds/wallet/test_did.py index a26d797ee53b..be16113e39ef 100644 --- a/chia/_tests/cmds/wallet/test_did.py +++ b/chia/_tests/cmds/wallet/test_did.py @@ -45,7 +45,7 @@ 
async def create_new_did_wallet( ) return {"wallet_id": 3, "my_did": "did:chia:testdid123456"} - inst_rpc_client = DidCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -78,7 +78,7 @@ def test_did_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_id = encode_puzzle_hash(get_bytes32(1), "did:chia:") message = b"hello did world!!" @@ -106,7 +106,7 @@ async def did_set_wallet_name(self, wallet_id: int, name: str) -> dict[str, Unio self.add_to_log("did_set_wallet_name", (wallet_id, name)) return {} - inst_rpc_client = DidSetNameRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidSetNameRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 did_name = "testdid" @@ -129,7 +129,7 @@ async def get_did_id(self, wallet_id: int) -> dict[str, str]: self.add_to_log("get_did_id", (wallet_id,)) return {"my_did": encode_puzzle_hash(get_bytes32(1), "did:chia:"), "coin_id": get_bytes32(2).hex()} - inst_rpc_client = DidGetDidRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidGetDidRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 expected_did = encode_puzzle_hash(get_bytes32(1), "did:chia:") @@ -165,7 +165,7 @@ async def get_did_info(self, coin_id: str, latest: bool) -> dict[str, object]: } return response - inst_rpc_client = DidGetDetailsRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidGetDetailsRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_coin_id_hex = get_bytes32(1).hex() command_args = ["wallet", "did", "get_details", FINGERPRINT_ARG, "--coin_id", did_coin_id_hex] @@ -207,7 +207,7 @@ async def update_did_metadata( self.add_to_log("update_did_metadata", (wallet_id, metadata, tx_config, push, timelock_info)) return DIDUpdateMetadataResponse([STD_UTX], [STD_TX], WalletSpendBundle([], G2Element()), uint32(wallet_id)) - inst_rpc_client = DidUpdateMetadataRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidUpdateMetadataRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 json_mdata = '{"test": true}' @@ -252,7 +252,7 @@ async def find_lost_did( self.add_to_log("find_lost_did", (coin_id, recovery_list_hash, metadata, num_verification)) return {"success": True, "latest_coin_id": get_bytes32(2).hex()} - inst_rpc_client = DidFindLostRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidFindLostRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client c_id = get_bytes32(1) json_mdata = '{"test": true}' @@ -291,7 +291,7 @@ async def did_message_spend( self.add_to_log("did_message_spend", (wallet_id, tx_config, extra_conditions, push, timelock_info)) return DIDMessageSpendResponse([STD_UTX], [STD_TX], WalletSpendBundle([], G2Element())) - inst_rpc_client = DidMessageSpendRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidMessageSpendRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 c_announcements = [get_bytes32(1), get_bytes32(2)] @@ -357,7 +357,7 @@ async def did_transfer_did( STD_TX.name, ) - inst_rpc_client = DidTransferRpcClient() # pylint: 
disable=no-value-for-parameter + inst_rpc_client = DidTransferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 t_address = encode_puzzle_hash(get_bytes32(1), "xch") diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index 5d9f74bebaaf..614e33074d2a 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -36,7 +36,7 @@ async def create_new_nft_wallet(self, did_id: str, name: Optional[str] = None) - self.add_to_log("create_new_nft_wallet", (did_id, name)) return {"wallet_id": 4} - inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTCreateRpcClient() did_id = encode_puzzle_hash(get_bytes32(2), "did:chia:") test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "nft", "create", FINGERPRINT_ARG, "-ntest", "--did-id", did_id] @@ -54,7 +54,7 @@ def test_nft_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_id = encode_puzzle_hash(get_bytes32(1), "nft") message = b"hello nft world!!" @@ -132,7 +132,7 @@ async def mint_nft( bytes32.zeros.hex(), ) - inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTCreateRpcClient() target_addr = encode_puzzle_hash(get_bytes32(2), "xch") test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -209,7 +209,7 @@ async def add_uri_to_nft( self.add_to_log("add_uri_to_nft", (wallet_id, nft_coin_id, key, uri, fee, tx_config, push, timelock_info)) return NFTAddURIResponse([STD_UTX], [STD_TX], uint32(wallet_id), WalletSpendBundle([], G2Element())) - inst_rpc_client = NFTAddUriRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTAddUriRpcClient() nft_coin_id = get_bytes32(2).hex() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -275,7 +275,7 @@ async def transfer_nft( WalletSpendBundle([], G2Element()), ) - inst_rpc_client = NFTTransferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTTransferRpcClient() nft_coin_id = get_bytes32(2).hex() target_address = encode_puzzle_hash(get_bytes32(2), "xch") test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -352,7 +352,7 @@ async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) - ) return {"nft_list": nft_list} - inst_rpc_client = NFTListRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTListRpcClient() launcher_ids = [bytes32([i] * 32).hex() for i in range(50, 60)] test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -401,7 +401,7 @@ async def set_nft_did( WalletSpendBundle([], G2Element()), ) - inst_rpc_client = NFTSetDidRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTSetDidRpcClient() nft_coin_id = get_bytes32(2).hex() did_id = encode_puzzle_hash(get_bytes32(3), "did:chia:") test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -447,7 +447,7 @@ def test_nft_get_info(capsys: object, get_test_cli_clients: tuple[TestRpcClients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() nft_coin_id = get_bytes32(2).hex() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ diff --git 
a/chia/_tests/cmds/wallet/test_notifications.py b/chia/_tests/cmds/wallet/test_notifications.py index 538015f0c1e6..ed9b9931f9e3 100644 --- a/chia/_tests/cmds/wallet/test_notifications.py +++ b/chia/_tests/cmds/wallet/test_notifications.py @@ -40,7 +40,7 @@ def __init__(self, name: str) -> None: return cast(TransactionRecord, FakeTransactionRecord(get_bytes32(2).hex())) - inst_rpc_client = NotificationsSendRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsSendRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_ph = get_bytes32(1) target_addr = encode_puzzle_hash(target_ph, "xch") @@ -82,7 +82,7 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications [Notification(get_bytes32(1), bytes("hello", "utf8"), uint64(1000000000), uint32(50))] ) - inst_rpc_client = NotificationsGetRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsGetRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_ph = get_bytes32(1) command_args = [ @@ -114,7 +114,7 @@ async def delete_notifications(self, ids: Optional[list[bytes32]] = None) -> boo self.add_to_log("delete_notifications", (ids,)) return True - inst_rpc_client = NotificationsDeleteRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsDeleteRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "notifications", "delete", FINGERPRINT_ARG, "--all"] # these are various things that should be in the output diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index f8d1aacdbfac..cef9ac5dcde4 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -55,7 +55,7 @@ async def vc_mint( ), ) - inst_rpc_client = VcsMintRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsMintRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_bytes = get_bytes32(1) did_id = encode_puzzle_hash(did_bytes, "did:chia:") @@ -108,7 +108,7 @@ def __getattr__(self, item: str) -> Any: records = [cast(VCRecord, FakeVC())] return records, proofs - inst_rpc_client = VcsGetRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsGetRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "vcs", "get", FINGERPRINT_ARG, "-s10", "-c10"] # these are various things that should be in the output @@ -144,7 +144,7 @@ async def vc_spend( ) return VCSpendResponse([STD_UTX], [STD_TX]) - inst_rpc_client = VcsUpdateProofsRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsUpdateProofsRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client vc_bytes = get_bytes32(1) target_ph = get_bytes32(2) @@ -196,7 +196,7 @@ async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: self.add_to_log("vc_add_proofs", (proofs,)) return None - inst_rpc_client = VcsAddProofRevealRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsAddProofRevealRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client new_proof = "test_proof" command_args = ["wallet", "vcs", "add_proof_reveal", FINGERPRINT_ARG, f"-p{new_proof}"] @@ -220,7 +220,7 @@ async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: self.add_to_log("vc_get_proofs_for_root", (root,)) return {"test_proof": "1", "test_proof2": "1"} - inst_rpc_client = VcsGetProofsForRootRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = 
VcsGetProofsForRootRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client proof_hash = get_bytes32(1) command_args = ["wallet", "vcs", "get_proofs_for_root", FINGERPRINT_ARG, f"-r{proof_hash.hex()}"] @@ -261,7 +261,7 @@ async def vc_revoke( self.add_to_log("vc_revoke", (vc_parent_id, tx_config, fee, push, timelock_info)) return VCRevokeResponse([STD_UTX], [STD_TX]) - inst_rpc_client = VcsRevokeRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsRevokeRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client parent_id = get_bytes32(1) vc_id = get_bytes32(2) @@ -330,7 +330,7 @@ async def crcat_approve_pending( ) return [STD_TX] - inst_rpc_client = VcsApproveRCATSRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsApproveRCATSRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client wallet_id = uint32(2) command_args = [ diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 64407d2efe88..ebd1a51d6bd8 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -60,7 +60,7 @@ def test_get_transaction(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose command_args = ["wallet", "get_transaction", WALLET_ID_ARG, "-tx", bytes32_hexstr] @@ -149,7 +149,7 @@ async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: "total_count": 1, } - inst_rpc_client = GetTransactionsWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetTransactionsWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose command_args = [ @@ -269,7 +269,7 @@ async def get_connections( } ] - inst_rpc_client = ShowRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = ShowRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "show", FINGERPRINT_ARG] assert_list = [ @@ -376,7 +376,7 @@ async def cat_spend( ) return CATSpendResponse([STD_UTX], [STD_TX], STD_TX, STD_TX.name) - inst_rpc_client = SendWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = SendWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose addr = encode_puzzle_hash(get_bytes32(3), "xch") @@ -479,7 +479,7 @@ async def get_next_address(self, wallet_id: int, new_address: bool) -> str: return encode_puzzle_hash(get_bytes32(3), "xch") return encode_puzzle_hash(get_bytes32(4), "xch") - inst_rpc_client = GetAddressWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetAddressWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose addr1 = encode_puzzle_hash(get_bytes32(3), "xch") @@ -526,7 +526,7 @@ async def spend_clawback_coins( ], } - inst_rpc_client = ClawbackWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = ClawbackWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client tx_ids = [get_bytes32(3), get_bytes32(4), get_bytes32(5)] r_tx_ids_hex = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] @@ -560,7 +560,7 @@ async def 
delete_unconfirmed_transactions(self, wallet_id: int) -> None: self.add_to_log("delete_unconfirmed_transactions", (wallet_id,)) return None - inst_rpc_client = UnconfirmedTxRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = UnconfirmedTxRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -586,7 +586,7 @@ async def get_current_derivation_index(self) -> str: self.add_to_log("get_current_derivation_index", ()) return str(520) - inst_rpc_client = GetDerivationIndexRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -605,7 +605,7 @@ def test_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client xch_addr = encode_puzzle_hash(get_bytes32(1), "xch") message = b"hello world" @@ -633,7 +633,7 @@ async def extend_derivation_index(self, index: int) -> str: self.add_to_log("extend_derivation_index", (index,)) return str(index) - inst_rpc_client = UpdateDerivationIndexRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = UpdateDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client index = 600 command_args = ["wallet", "update_derivation_index", FINGERPRINT_ARG, "--index", str(index)] @@ -657,7 +657,7 @@ async def set_cat_name(self, wallet_id: int, name: str) -> None: self.add_to_log("set_cat_name", (wallet_id, name)) return None # we don't need to do anything here - inst_rpc_client = AddTokenRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = AddTokenRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "add_token", FINGERPRINT_ARG, "-nexamplecat"] assert_list = [f"Successfully renamed test1 with wallet_id 2 on key {FINGERPRINT} to examplecat"] @@ -765,7 +765,7 @@ async def create_offer_for_ids( return CreateOfferForIDsResponse([STD_UTX], [STD_TX], created_offer, trade_offer) - inst_rpc_client = MakeOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = MakeOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client request_cat_id = get_bytes32(2) request_nft_id = get_bytes32(2) @@ -913,7 +913,7 @@ async def get_all_offers( records.append(trade_offer) return records - inst_rpc_client = GetOffersRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetOffersRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -999,7 +999,7 @@ async def take_offer( ), ) - inst_rpc_client = TakeOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TakeOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # these are various things that should be in the output cat1 = bytes32.from_hexstr("fd6a341ed39c05c31157d5bfea395a0e142398ced24deea1e82f836d7ec2909c") @@ -1084,7 +1084,7 @@ async def cancel_offer( self.add_to_log("cancel_offer", (trade_id, tx_config, fee, secure, push, timelock_info)) return CancelOfferResponse([STD_UTX], [STD_TX]) - inst_rpc_client = CancelOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CancelOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", 
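Taken together, the hunks above all drop the same `no-value-for-parameter` suppression from the stubbed RPC clients: pylint could not infer that these test doubles are constructible without arguments, and ruff raises no such complaint. The stub pattern itself is unchanged. A condensed sketch follows; the import path is an assumption about where the shared CLI test helpers live, and `add_to_log` is used here exactly as it appears in the hunks above:

# Hedged sketch of the test-double pattern used throughout these CLI tests.
# The module path below is assumed, not verified.
from typing import Union

from chia._tests.cmds.cmd_test_utils import TestWalletRpcClient


class DidSetNameRpcClient(TestWalletRpcClient):
    async def did_set_wallet_name(self, wallet_id: int, name: str) -> dict[str, Union[str, int]]:
        # Record the call so the test can assert on the RPC traffic afterwards.
        self.add_to_log("did_set_wallet_name", (wallet_id, name))
        return {}


# With pylint removed, the bare constructor call needs no suppression comment.
inst_rpc_client = DidSetNameRpcClient()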
diff --git a/chia/_tests/process_junit.py b/chia/_tests/process_junit.py index 8442c88464eb..abf65408ca16 100644 --- a/chia/_tests/process_junit.py +++ b/chia/_tests/process_junit.py @@ -345,5 +345,4 @@ def output_time_out_assert( if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index 751d9aacaa08..3ea473c59479 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -348,7 +348,7 @@ def __exit__( label=self.label, ) - ether.record_property( # pylint: disable=E1102 + ether.record_property( data.tag, json.dumps(data.marshal(), ensure_ascii=True, sort_keys=True), ) diff --git a/chia/_tests/util/time_out_assert.py b/chia/_tests/util/time_out_assert.py index a777c802675e..b1b99917077f 100644 --- a/chia/_tests/util/time_out_assert.py +++ b/chia/_tests/util/time_out_assert.py @@ -125,7 +125,7 @@ async def time_out_assert_custom_interval( timed_out=timed_out, ) - ether.record_property( # pylint: disable=E1102 + ether.record_property( data.tag, json.dumps(data.marshal(), ensure_ascii=True, sort_keys=True), ) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index db4c17f9eec1..e2aad760b6e6 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -281,7 +281,7 @@ async def assert_push_tx_error(node_rpc: FullNodeRpcClient, tx: TransactionRecor try: await node_rpc.push_tx(spend_bundle) except ValueError as error: - error_string = error.args[0]["error"] # noqa: # pylint: disable=E1126 + error_string = error.args[0]["error"] if error_string.find("ASSERT_ANNOUNCE_CONSUMED_FAILED") == -1: raise ValueError from error @@ -413,7 +413,7 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen ).signed_tx resp_client = await client.push_transactions( - PushTransactions(transactions=[tx], fee=uint64(10)), # pylint: disable=unexpected-keyword-arg + PushTransactions(transactions=[tx], fee=uint64(10)), DEFAULT_TX_CONFIG, ) resp = await client.fetch( diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 481173c0bd51..06332c1f0834 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -226,7 +226,7 @@ async def test_confirming_txs_not_ours(wallet_environments: WalletTestFramework) ) await env_2.rpc_client.push_transactions( - PushTransactions( # pylint: disable=unexpected-keyword-arg + PushTransactions( transactions=action_scope.side_effects.transactions, sign=False, ), diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py index cf67f4f843a1..3c7291a72de1 100644 --- a/chia/cmds/chia.py +++ b/chia/cmds/chia.py @@ -136,7 +136,7 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None: def main() -> None: import chia.cmds.signer # noqa - cli() # pylint: disable=no-value-for-parameter + cli() if __name__ == "__main__": diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 1aedf7ee963d..0771c10247c7 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -41,7 +41,7 @@ def option(*param_decls: str, **kwargs: Any) -> Any: else: default_default = MISSING - return field( # pylint: disable=invalid-field-call + return field( metadata=dict( option_args=dict( param_decls=tuple(param_decls), @@ -268,7 +268,7 @@ class WalletClientInfo: @command_helper class NeedsWalletRPC: - context: Context = field(default_factory=dict) # 
pylint: disable=invalid-field-call + context: Context = field(default_factory=dict) client_info: Optional[WalletClientInfo] = None wallet_rpc_port: Optional[int] = option( "-wp", @@ -294,7 +294,7 @@ async def wallet_rpc(self, **kwargs: Any) -> AsyncIterator[WalletClientInfo]: yield self.client_info else: if "root_path" not in kwargs: - kwargs["root_path"] = self.context["root_path"] # pylint: disable=unsubscriptable-object + kwargs["root_path"] = self.context["root_path"] async with get_wallet_client(self.wallet_rpc_port, self.fingerprint, **kwargs) as ( wallet_client, fp, diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py index 85d90337bdc2..56b49e7982d8 100644 --- a/chia/cmds/signer.py +++ b/chia/cmds/signer.py @@ -151,7 +151,7 @@ class SPIn(_SPTranslation): def read_sp_input(self, typ: type[_T_ClvmStreamable]) -> list[_T_ClvmStreamable]: final_list: list[_T_ClvmStreamable] = [] - for filename in self.signer_protocol_input: # pylint: disable=not-an-iterable + for filename in self.signer_protocol_input: with open(Path(filename), "rb") as file: final_list.append( byte_deserialize_clvm_streamable( diff --git a/chia/daemon/windows_signal.py b/chia/daemon/windows_signal.py index 67f8789e6a57..74424a0723b7 100644 --- a/chia/daemon/windows_signal.py +++ b/chia/daemon/windows_signal.py @@ -21,8 +21,8 @@ import threading sigmap = { - signal.SIGINT: signal.CTRL_C_EVENT, # pylint: disable=E1101 - signal.SIGBREAK: signal.CTRL_BREAK_EVENT, # pylint: disable=E1101 + signal.SIGINT: signal.CTRL_C_EVENT, + signal.SIGBREAK: signal.CTRL_BREAK_EVENT, } def kill(pid: int, signum: signal.Signals) -> None: diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index ead6f196cfd2..2e7688097893 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -496,8 +496,7 @@ async def change_root_status(self, root: Root, status: Status = Status.PENDING) async def check(self) -> None: for check in self._checks: - # pylint seems to think these are bound methods not unbound methods. - await check(self) # pylint: disable=too-many-function-args + await check(self) async def _check_roots_are_incrementing(self) -> None: async with self.db_wrapper.reader() as reader: diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index f5531e8d7b90..10d58f82f89b 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -2651,7 +2651,7 @@ async def add_transaction( await self.broadcast_added_tx(mempool_item, current_peer=peer) if self.simulator_transaction_callback is not None: # callback - await self.simulator_transaction_callback(spend_name) # pylint: disable=E1102 + await self.simulator_transaction_callback(spend_name) else: self.mempool_manager.remove_seen(spend_name) diff --git a/chia/rpc/data_layer_rpc_util.py b/chia/rpc/data_layer_rpc_util.py index 315152222f40..33b8ca4846cb 100644 --- a/chia/rpc/data_layer_rpc_util.py +++ b/chia/rpc/data_layer_rpc_util.py @@ -26,9 +26,7 @@ async def __call__(self, request: dict[str, Any]) -> dict[str, Any]: class UnboundMarshalledRoute(Protocol): # Ignoring pylint complaint about the name of the first argument since this is a # special case. 
- async def __call__( # pylint: disable=E0213 - protocol_self, self: Any, request: MarshallableProtocol - ) -> MarshallableProtocol: + async def __call__(protocol_self, self: Any, request: MarshallableProtocol) -> MarshallableProtocol: pass diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 387475823741..1b091bd4effe 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -1,5 +1,3 @@ -# pylint: disable=invalid-field-call - from __future__ import annotations import sys diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 56f32200f0c1..5458195e5aed 100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -3186,7 +3186,7 @@ async def dao_create_proposal( return { "success": True, # Semantics guarantee proposal_id here - "proposal_id": proposal_id, # pylint: disable=possibly-used-before-assignment + "proposal_id": proposal_id, "tx_id": None, # tx_endpoint wrapper will take care of this "tx": None, # tx_endpoint wrapper will take care of this "transactions": None, # tx_endpoint wrapper will take care of this diff --git a/chia/server/chia_policy.py b/chia/server/chia_policy.py index 25a1a8dfaa14..6073f69c173e 100644 --- a/chia/server/chia_policy.py +++ b/chia/server/chia_policy.py @@ -261,7 +261,7 @@ async def _chia_accept_loop(self, listener: socket.socket) -> tuple[socket.socke try: return await self._chia_accept(listener) except OSError as exc: - if exc.winerror not in ( # pylint: disable=E1101 + if exc.winerror not in ( _winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED, ): @@ -269,7 +269,7 @@ async def _chia_accept_loop(self, listener: socket.socket) -> tuple[socket.socke def _chia_accept(self, listener: socket.socket) -> asyncio.Future[tuple[socket.socket, tuple[object, ...]]]: self._register_with_iocp(listener) - conn = self._get_accept_socket(listener.family) # pylint: disable=assignment-from-no-return + conn = self._get_accept_socket(listener.family) ov = _overlapped.Overlapped(_winapi.NULL) ov.AcceptEx(listener.fileno(), conn.fileno()) @@ -292,13 +292,13 @@ async def accept_coro(self: ChiaProactor, future: asyncio.Future[object], conn: raise except OSError as exc: # https://github.com/python/cpython/issues/93821#issuecomment-1157945855 - if exc.winerror not in ( # pylint: disable=E1101 + if exc.winerror not in ( _winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED, ): raise - future = self._register(ov, listener, finish_accept) # pylint: disable=assignment-from-no-return + future = self._register(ov, listener, finish_accept) coro = accept_coro(self, future, conn) asyncio.ensure_future(coro, loop=self._loop) return future diff --git a/chia/util/db_wrapper.py b/chia/util/db_wrapper.py index 4453fc4d747e..99176aa3a794 100644 --- a/chia/util/db_wrapper.py +++ b/chia/util/db_wrapper.py @@ -114,8 +114,7 @@ def get_host_parameter_limit() -> int: if sys.version_info >= (3, 11): connection = sqlite3.connect(":memory:") - # sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER exists in 3.11, pylint - limit_number = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER # pylint: disable=E1101 + limit_number = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER host_parameter_limit = connection.getlimit(limit_number) else: # guessing based on defaults, seems you can't query diff --git a/chia/util/dump_keyring.py b/chia/util/dump_keyring.py index e4c9b2a3004d..11b8831d3c7f 100755 --- a/chia/util/dump_keyring.py +++ b/chia/util/dump_keyring.py @@ -86,7 +86,7 @@ def dump(keyring_file: str, full_payload: bool, 
passphrase_file: Optional[TextIO def main() -> None: colorama.init() - dump() # pylint: disable=no-value-for-parameter + dump() if __name__ == "__main__": diff --git a/chia/util/task_timing.py b/chia/util/task_timing.py index 3ebce35a6779..e951afb10a4b 100644 --- a/chia/util/task_timing.py +++ b/chia/util/task_timing.py @@ -128,7 +128,7 @@ def __init__(self, name: str, file: str) -> None: def get_stack(frame: FrameType) -> str: ret = "" code = frame.f_code - while code.co_flags & inspect.CO_COROUTINE: # pylint: disable=no-member + while code.co_flags & inspect.CO_COROUTINE: ret = f"/{code.co_name}{ret}" if frame.f_back is None: break @@ -162,7 +162,7 @@ def trace_fun(frame: FrameType, event: str, arg: Any) -> None: return # we only care about instrumenting co-routines - if (frame.f_code.co_flags & inspect.CO_COROUTINE) == 0: # pylint: disable=no-member + if (frame.f_code.co_flags & inspect.CO_COROUTINE) == 0: # with open("instrumentation.log", "a") as f: # f.write(f"[1] {event} {get_fun(frame)}\n") return diff --git a/chia/wallet/dao_wallet/dao_wallet.py b/chia/wallet/dao_wallet/dao_wallet.py index 2b04cf4dea61..03b76aaca93e 100644 --- a/chia/wallet/dao_wallet/dao_wallet.py +++ b/chia/wallet/dao_wallet/dao_wallet.py @@ -1473,7 +1473,6 @@ async def create_proposal_close_spend( spend_bundle = WalletSpendBundle([proposal_cs, treasury_cs], AugSchemeMPL.aggregate([])) else: # TODO: maybe we can refactor this to provide clarity around timer_cs having been defined - # pylint: disable-next=E0606 spend_bundle = WalletSpendBundle([proposal_cs, timer_cs, treasury_cs], AugSchemeMPL.aggregate([])) if fee > 0: await self.standard_wallet.create_tandem_xch_tx(fee, action_scope) diff --git a/chia/wallet/nft_wallet/nft_puzzles.py b/chia/wallet/nft_wallet/nft_puzzles.py index 90dc670e7570..94e88a0d38b0 100644 --- a/chia/wallet/nft_wallet/nft_puzzles.py +++ b/chia/wallet/nft_wallet/nft_puzzles.py @@ -98,13 +98,13 @@ async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: dict[str, assert uncurried_nft is not None data_uris: list[str] = [] - for uri in uncurried_nft.data_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.data_uris.as_python(): data_uris.append(str(uri, "utf-8")) meta_uris: list[str] = [] - for uri in uncurried_nft.meta_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.meta_uris.as_python(): meta_uris.append(str(uri, "utf-8")) license_uris: list[str] = [] - for uri in uncurried_nft.license_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.license_uris.as_python(): license_uris.append(str(uri, "utf-8")) off_chain_metadata: Optional[str] = None nft_info = NFTInfo( diff --git a/chia/wallet/util/blind_signer_tl.py b/chia/wallet/util/blind_signer_tl.py index 2d017b5bddef..be2f24470e0c 100644 --- a/chia/wallet/util/blind_signer_tl.py +++ b/chia/wallet/util/blind_signer_tl.py @@ -17,9 +17,6 @@ ) from chia.wallet.util.clvm_streamable import TranslationLayer, TranslationLayerMapping, clvm_streamable -# Pylint doesn't understand that these classes are in fact dataclasses -# pylint: disable=invalid-field-call - @clvm_streamable @dataclass(frozen=True) diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py index 58ebbeb67885..426c5e75ffd0 100644 --- a/chia/wallet/wallet.py +++ b/chia/wallet/wallet.py @@ -501,7 +501,7 @@ async def get_coins_to_offer( # WSChiaConnection is only imported for type checking async def coin_added( self, coin: Coin, height: uint32, peer: WSChiaConnection, coin_data: Optional[Streamable] - ) -> None: # 
pylint: disable=used-before-assignment + ) -> None: pass def get_name(self) -> str: diff --git a/chia/wallet/wallet_protocol.py b/chia/wallet/wallet_protocol.py index 93c569f731a9..e80427d3573a 100644 --- a/chia/wallet/wallet_protocol.py +++ b/chia/wallet/wallet_protocol.py @@ -58,9 +58,7 @@ def get_name(self) -> str: ... async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool: ... wallet_info: WalletInfo - # WalletStateManager is only imported for type hinting thus leaving pylint - # unable to process this - wallet_state_manager: WalletStateManager # pylint: disable=used-before-assignment + wallet_state_manager: WalletStateManager class GSTOptionalArgs(TypedDict): diff --git a/poetry.lock b/poetry.lock index fc77a1709392..c68f41213dd2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -255,20 +255,6 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] -[[package]] -name = "astroid" -version = "3.2.4" -description = "An abstract syntax tree for Python with inference support." -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] name = "async-timeout" version = "4.0.3" @@ -1233,20 +1219,6 @@ Pygments = ">=2.9.0,<3.0.0" [package.extras] toml = ["tomli (>=1.2.1)"] -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = true -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - [[package]] name = "distlib" version = "0.3.7" @@ -2491,36 +2463,6 @@ importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} packaging = ">=22.0" setuptools = ">=42.0.0" -[[package]] -name = "pylint" -version = "3.2.6" -description = "python code static checker" -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, - {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, -] - -[package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - [[package]] name = "pyproject-hooks" version = "1.0.0" @@ -2773,6 +2715,33 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] 
+name = "ruff" +version = "0.7.1" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, + {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, + {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, + {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, + {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, + {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, + {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, +] + [[package]] name = "s3transfer" version = "0.10.1" @@ -2991,17 +2960,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "tomlkit" -version = "0.12.1" -description = "Style preserving TOML library" -optional = true -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, -] - [[package]] name = "types-aiofiles" version = "23.2.0.20240311" @@ -3422,11 +3380,11 @@ url = "https://pypi.chia.net/simple" reference = "chia" [extras] -dev = 
["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] legacy-keyring = ["keyrings.cryptfile"] upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "fe87fd693372aa1c4e1596ce31c01cee1e50c5f7f24b9099c80410fde037568d" +content-hash = "18912f0d4b20b794111a68c8c95cc875ede2cc0504c1640d81e4c2d893b3276d" diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 7913c3742373..000000000000 --- a/pylintrc +++ /dev/null @@ -1,583 +0,0 @@ -[MASTER] -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-allow-list=lxml,zstd - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=4 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". 
If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=locally-disabled, - suppressed-message, - import-error, - abstract-method, - arguments-differ, - arguments-renamed, - attribute-defined-outside-init, - bare-except, - broad-exception-caught, - broad-exception-raised, - cell-var-from-loop, - chained-comparison, - consider-iterating-dictionary, - consider-using-enumerate, - consider-using-in, - consider-using-with, - cyclic-import, - dangerous-default-value, - duplicate-code, - duplicate-key, - expression-not-assigned, - fixme, - global-statement, - global-variable-not-assigned, - implicit-str-concat, - import-outside-toplevel, - inconsistent-return-statements, - invalid-name, - keyword-arg-before-vararg, - line-too-long, - logging-fstring-interpolation, - logging-not-lazy, - lost-exception, - missing-class-docstring, - missing-function-docstring, - missing-module-docstring, - no-else-break, - no-else-continue, - no-else-raise, - no-else-return, - pointless-statement, - pointless-string-statement, - protected-access, - raise-missing-from, - raising-format-tuple, - redefined-argument-from-local, - redefined-builtin, - redefined-outer-name, - reimported, - return-in-finally, - simplifiable-condition, - simplifiable-if-expression, - simplifiable-if-statement, - singleton-comparison, - subprocess-run-check, - superfluous-parens, - too-few-public-methods, - too-many-ancestors, - too-many-arguments, - too-many-boolean-expressions, - too-many-branches, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-public-methods, - too-many-return-statements, - too-many-statements, - try-except-raise, - typevar-name-incorrect-variance, - unbalanced-tuple-unpacking, - undefined-loop-variable, - ungrouped-imports, - unidiomatic-typecheck, - unknown-option-value, - unnecessary-comprehension, - unnecessary-dict-index-lookup, - unnecessary-dunder-call, - unnecessary-ellipsis, - unnecessary-lambda, - unnecessary-lambda-assignment, - unnecessary-negation, - unnecessary-pass, - unspecified-encoding, - unused-argument, - unused-import, - unused-variable, - unused-wildcard-import, - use-a-generator, - use-dict-literal, - use-implicit-booleaness-not-comparison, - use-implicit-booleaness-not-len, - useless-else-on-loop, - useless-import-alias, - useless-return, - use-list-literal, - wildcard-import, - wrong-import-order, - wrong-import-position, - multiple-statements, - contextmanager-generator-missing-cleanup, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. 
Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. -score=no - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=optparse.Values,sys.exit - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins=cls - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. 
-ignored-modules=chiabip158, - chiapos, - chiavdf, - chia_rs, - cryptography, - aiohttp, - keyring, - bitstring, - clvm_tools, - clvm_tools_rs, - setproctitle, - clvm, - colorlog, - concurrent_log_handler, - aiosqlite, - sortedcontainers, - aiter, - miniupnpc, - pytest, - setuptools_scm - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[BASIC] - -# Naming style matching correct argument names -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style -#argument-rgx= - -# Naming style matching correct attribute names -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style -#class-attribute-rgx= - -# Naming style matching correct class names -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming-style -#class-rgx= - -# Naming style matching correct constant names -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming style matching correct inline iteration names -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style -#inlinevar-rgx= - -# Naming style matching correct method names -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style -#method-rgx= - -# Naming style matching correct module names -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. 
-property-classes=abc.abstractproperty
-
-# Naming style matching correct variable names
-variable-naming-style=snake_case
-
-# Regular expression matching correct variable names. Overrides variable-
-# naming-style
-#variable-rgx=
-
-
-[SPELLING]
-
-# Limits count of emitted suggestions for spelling mistakes
-max-spelling-suggestions=4
-
-# Spelling dictionary name. Available dictionaries: none. To make it working
-# install python-enchant package.
-spelling-dict=
-
-# List of comma separated words that should not be checked.
-spelling-ignore-words=
-
-# A path to a file that contains private dictionary; one word per line.
-spelling-private-dict-file=
-
-# Tells whether to store unknown words to indicated private dictionary in
-# --spelling-private-dict-file option instead of raising a message.
-spelling-store-unknown-words=no
-
-
-[FORMAT]
-
-# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
-expected-line-ending-format=
-
-# Regexp for a line that is allowed to be longer than the limit.
-ignore-long-lines=^\s*(# )?<?https?://\S+>?$
-
-# Number of spaces of indent required inside a hanging or continued line.
-indent-after-paren=4
-
-# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
-# tab).
-indent-string='    '
-
-# Maximum number of characters on a single line.
-max-line-length=120
-
-# Maximum number of lines in a module
-max-module-lines=1000
-
-# Allow the body of a class to be on the same line as the declaration if body
-# contains single statement.
-single-line-class-stmt=no
-
-# Allow the body of an if to be on the same line as the test if there is no
-# else.
-single-line-if-stmt=no
-
-
-[SIMILARITIES]
-
-# Ignore comments when computing similarities.
-ignore-comments=yes
-
-# Ignore docstrings when computing similarities.
-ignore-docstrings=yes
-
-# Ignore imports when computing similarities.
-ignore-imports=no
-
-# Minimum lines number of a similarity.
-min-similarity-lines=4
-
-
-[DESIGN]
-
-# Maximum number of arguments for function / method
-max-args=5
-
-# Maximum number of attributes for a class (see R0902).
-max-attributes=7
-
-# Maximum number of boolean expressions in a if statement
-max-bool-expr=5
-
-# Maximum number of branch for function / method body
-max-branches=12
-
-# Maximum number of locals for function / method body
-max-locals=15
-
-# Maximum number of parents for a class (see R0901).
-max-parents=7
-
-# Maximum number of public methods for a class (see R0904).
-max-public-methods=20
-
-# Maximum number of return / yield for function / method body
-max-returns=6
-
-# Maximum number of statements in function / method body
-max-statements=50
-
-# Minimum number of public methods for a class (see R0903).
-min-public-methods=2
-
-# The following functions mutate other functions' signatures and need a E1120 exception
-signature-mutators=chia.rpc.util.tx_endpoint
-
-[IMPORTS]
-
-# Allow wildcard imports from modules that define __all__.
-allow-wildcard-with-all=no
-
-# Analyse import fallback blocks. This can be used to support both Python 2 and
-# 3 compatible code, which means that the block might have code that exists
-# only in one or another interpreter, leading to false positives when analysed.
-analyse-fallback-blocks=no
-
-# Deprecated modules which should not be used, separated by a comma
-deprecated-modules=regsub,
-                   TERMIOS,
-                   Bastion,
-                   rexec
-
-# Create a graph of external dependencies in the given file (report RP0402 must
-# not be disabled)
-ext-import-graph=
-
-# Create a graph of every (i.e.
internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=builtins.Exception diff --git a/pyproject.toml b/pyproject.toml index 92db76773d38..732a42af5eab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -91,7 +91,6 @@ mypy = { version = "1.11.1", optional = true } pre-commit = [ { version = "3.5.0", python = "<3.9", optional = true }, { version = "3.7.1", python = ">=3.9", optional = true } ] py3createtorrent = { version = "1.2.1", optional = true } pyinstaller = { version = "6.9.0", optional = true } -pylint = { version = "3.2.6", optional = true } pytest = { version = "8.3.3", optional = true } pytest-cov = { version = "5.0.0", optional = true } pytest-mock = { version = "3.14.0", optional = true } @@ -108,10 +107,11 @@ pyupgrade = { version = "3.16.0", optional = true } # numpy = [ # {version="1.24.4", python = "<3.9", optional = true}, # {version="1.26.4", python = ">=3.9", optional = true}] +ruff = "0.7.1" [tool.poetry.extras] -dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "mypy", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools", "pyupgrade", "lxml"] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "mypy", "pre-commit", "py3createtorrent", "pyinstaller", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools", "pyupgrade", "lxml"] upnp = ["miniupnpc"] legacy_keyring = ["keyrings.cryptfile"] @@ -158,3 +158,63 @@ include = ''' )$ ''' exclude = '' + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +preview = true +select = ["PL"] +explicit-preview-rules = false +ignore = [ + # Pylint convention + "PLC0105", # type-name-incorrect-variance + "PLC0415", # import-outside-top-level + "PLC2801", # unnecessary-dunder-call + "PLC0206", # dict-index-missing-items + "PLC1901", # compare-to-empty-string + "PLC2701", # import-private-name + "PLC0414", # useless-import-alias + + # Pylint refactor + "PLR0915", # too-many-statements + "PLR0914", # too-many-locals + "PLR0913", # too-many-arguments + "PLR0912", # too-many-branches + "PLR1702", # too-many-nested-blocks + "PLR0904", # too-many-public-methods + "PLR6301", # no-self-use + "PLR0917", # too-many-positional-arguments + "PLR6201", # literal-membership + "PLR0911", # 
too-many-return-statements + "PLR2004", # magic-value-comparison + "PLR1714", # repeated-equality-comparison + "PLR6104", # non-augmented-assignment + "PLR1704", # redefined-argument-from-local + "PLR0916", # too-many-boolean-expressions + "PLR5501", # collapsible-else-if + "PLR1711", # useless-return + "PLR1730", # if-stmt-min-max + "PLR1736", # unnecessary-list-index-lookup + "PLR1733", # unnecessary-dict-index-lookup + + # Pylint warning + "PLW2901", # redefined-loop-name + "PLW1641", # eq-without-hash + "PLW1514", # unspecified-encoding + "PLW0602", # global-variable-not-assigned + "PLW0603", # global-statement + "PLW0108", # unnecessary-lambda + "PLW1510", # subprocess-run-without-check + "PLW0120", # useless-else-on-loop +] + +[tool.ruff.lint.pylint] +max-args = 5 +max-locals = 15 +max-returns = 6 +max-branches = 12 +max-statements = 50 +max-nested-blocks = 5 +max-public-methods = 20 +max-bool-expr = 5 diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py index 3278e6898401..293f4d5a3100 100755 --- a/tools/analyze-chain.py +++ b/tools/analyze-chain.py @@ -163,5 +163,4 @@ def default_call( if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/tools/analyze_memory_profile.py b/tools/analyze_memory_profile.py index 976151ce95ee..ffe08b4532e7 100644 --- a/tools/analyze_memory_profile.py +++ b/tools/analyze_memory_profile.py @@ -211,4 +211,4 @@ def analyze_slot(ctx: click.Context, slot: int) -> None: if __name__ == "__main__": - memory_profiler() # pylint: disable = no-value-for-parameter + memory_profiler() diff --git a/tools/cpu_utilization.py b/tools/cpu_utilization.py index 9acc028b0d53..40cb24d87f9b 100644 --- a/tools/cpu_utilization.py +++ b/tools/cpu_utilization.py @@ -115,5 +115,4 @@ def main(pid: int, output: str, threads: bool) -> None: if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/tools/generate_chain.py b/tools/generate_chain.py index 87c135539f7e..4d3b0d59823b 100644 --- a/tools/generate_chain.py +++ b/tools/generate_chain.py @@ -222,5 +222,4 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/tools/run_block.py b/tools/run_block.py index b8f5c7e8a0f1..2d5a9feac4cc 100644 --- a/tools/run_block.py +++ b/tools/run_block.py @@ -76,4 +76,4 @@ def get_config_and_constants(): if __name__ == "__main__": - cmd_run_json_block_file() # pylint: disable=no-value-for-parameter + cmd_run_json_block_file() diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py index 2af2ca12880a..ef928ea667ca 100755 --- a/tools/test_full_sync.py +++ b/tools/test_full_sync.py @@ -204,5 +204,4 @@ async def run_sync_checkpoint( main.add_command(analyze) if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() From 03a41ac486bfaef1ca41ec1d936dd17bdf4d775b Mon Sep 17 00:00:00 2001 From: Amine Khaldi Date: Mon, 28 Oct 2024 18:31:24 +0100 Subject: [PATCH 68/69] CHIA-1668 Move validate_block_merkle_roots next to validate_block_body (#18757) Move validate_block_merkle_roots next to validate_block_body. 
---
 chia/consensus/block_body_validation.py | 37 ++++++++++++++++++++--
 chia/consensus/block_root_validation.py | 41 -------------------------
 2 files changed, 34 insertions(+), 44 deletions(-)
 delete mode 100644 chia/consensus/block_root_validation.py

diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py
index 796a0699f055..7bc5dbf41eaf 100644
--- a/chia/consensus/block_body_validation.py
+++ b/chia/consensus/block_body_validation.py
@@ -6,17 +6,16 @@
 from dataclasses import dataclass, field
 from typing import Callable, Optional, Union

-from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions
+from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions, compute_merkle_set_root
 from chiabip158 import PyBIP158

 from chia.consensus.block_record import BlockRecord
 from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward
-from chia.consensus.block_root_validation import validate_block_merkle_roots
 from chia.consensus.blockchain_interface import BlockRecordsProtocol
 from chia.consensus.coinbase import create_farmer_coin, create_pool_coin
 from chia.consensus.constants import ConsensusConstants
 from chia.full_node.mempool_check_conditions import mempool_check_time_locks
-from chia.types.blockchain_format.coin import Coin
+from chia.types.blockchain_format.coin import Coin, hash_coin_ids
 from chia.types.blockchain_format.sized_bytes import bytes32
 from chia.types.coin_record import CoinRecord
 from chia.types.full_block import FullBlock
@@ -149,6 +148,38 @@ def rollback(self, header_hash: bytes32, height: int) -> None:
         self.removals_since_fork = {k: v for k, v in self.removals_since_fork.items() if v.height <= height}


+def validate_block_merkle_roots(
+    block_additions_root: bytes32,
+    block_removals_root: bytes32,
+    tx_additions: list[tuple[Coin, bytes32]],
+    tx_removals: list[bytes32],
+) -> Optional[Err]:
+    # Create addition Merkle set
+    puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {}
+
+    for coin, coin_name in tx_additions:
+        if coin.puzzle_hash in puzzlehash_coins_map:
+            puzzlehash_coins_map[coin.puzzle_hash].append(coin_name)
+        else:
+            puzzlehash_coins_map[coin.puzzle_hash] = [coin_name]
+
+    # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
+    additions_merkle_items: list[bytes32] = []
+    for puzzle, coin_ids in puzzlehash_coins_map.items():
+        additions_merkle_items.append(puzzle)
+        additions_merkle_items.append(hash_coin_ids(coin_ids))
+
+    additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
+    removals_root = bytes32(compute_merkle_set_root(tx_removals))
+
+    if block_additions_root != additions_root:
+        return Err.BAD_ADDITION_ROOT
+    if block_removals_root != removals_root:
+        return Err.BAD_REMOVAL_ROOT
+
+    return None
+
+
 async def validate_block_body(
     constants: ConsensusConstants,
     records: BlockRecordsProtocol,
diff --git a/chia/consensus/block_root_validation.py b/chia/consensus/block_root_validation.py
deleted file mode 100644
index cdda927f3611..000000000000
--- a/chia/consensus/block_root_validation.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from __future__ import annotations
-
-from typing import Optional
-
-from chia_rs import compute_merkle_set_root
-
-from chia.types.blockchain_format.coin import Coin, hash_coin_ids
-from chia.types.blockchain_format.sized_bytes import bytes32
-from chia.util.errors import Err
-
-
-def validate_block_merkle_roots(
-    block_additions_root: bytes32,
-    block_removals_root: bytes32,
-    tx_additions: list[tuple[Coin, bytes32]],
-    tx_removals: list[bytes32],
-) -> Optional[Err]:
-    # Create addition Merkle set
-    puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {}
-
-    for coin, coin_name in tx_additions:
-        if coin.puzzle_hash in puzzlehash_coins_map:
-            puzzlehash_coins_map[coin.puzzle_hash].append(coin_name)
-        else:
-            puzzlehash_coins_map[coin.puzzle_hash] = [coin_name]
-
-    # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash
-    additions_merkle_items: list[bytes32] = []
-    for puzzle, coin_ids in puzzlehash_coins_map.items():
-        additions_merkle_items.append(puzzle)
-        additions_merkle_items.append(hash_coin_ids(coin_ids))
-
-    additions_root = bytes32(compute_merkle_set_root(additions_merkle_items))
-    removals_root = bytes32(compute_merkle_set_root(tx_removals))
-
-    if block_additions_root != additions_root:
-        return Err.BAD_ADDITION_ROOT
-    if block_removals_root != removals_root:
-        return Err.BAD_REMOVAL_ROOT
-
-    return None

From cfab9eb2b3bcfa11f28ae2294b6f6451195c4179 Mon Sep 17 00:00:00 2001
From: Amine Khaldi
Date: Mon, 28 Oct 2024 18:31:34 +0100
Subject: [PATCH 69/69] CHIA-1674 Remove no longer needed get_symmetric_key (#18770)

Remove no longer needed get_symmetric_key.
---
 chia/util/file_keyring.py | 13 -------------
 1 file changed, 13 deletions(-)

diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py
index a5fbfdedbda1..e8148e6c123a 100644
--- a/chia/util/file_keyring.py
+++ b/chia/util/file_keyring.py
@@ -6,7 +6,6 @@
 import contextlib
 import os
 import shutil
-import sys
 import threading
 from collections.abc import Iterator
 from dataclasses import asdict, dataclass, field
@@ -55,18 +54,6 @@ def symmetric_key_from_passphrase(passphrase: str, salt: bytes) -> bytes:
     return pbkdf2_hmac("sha256", passphrase.encode(), salt, HASH_ITERS)


-def get_symmetric_key(salt: bytes) -> bytes:
-    from chia.cmds.passphrase_funcs import obtain_current_passphrase
-
-    try:
-        passphrase = obtain_current_passphrase(use_passphrase_cache=True)
-    except Exception as e:
-        print(f"Unable to unlock the keyring: {e}")
-        sys.exit(1)
-
-    return symmetric_key_from_passphrase(passphrase, salt)
-
-
 def encrypt_data(input_data: bytes, key: bytes, nonce: bytes) -> bytes:
     encryptor = ChaCha20Poly1305(key)
     data = encryptor.encrypt(nonce, CHECKBYTES_VALUE + input_data, None)
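With get_symmetric_key gone, callers derive the key directly from a passphrase they already hold. A minimal sketch of that remaining flow, assuming only the helpers and signatures visible in the hunk above; the salt and nonce sizes are illustrative choices, not values read from the keyring format.

import os

from chia.util.file_keyring import encrypt_data, symmetric_key_from_passphrase

salt = os.urandom(16)  # illustrative salt length
nonce = os.urandom(12)  # ChaCha20Poly1305 requires a 96-bit nonce

# PBKDF2-HMAC-SHA256 stretches the passphrase into a 32-byte ChaCha20 key.
key = symmetric_key_from_passphrase("correct horse battery staple", salt)
ciphertext = encrypt_data(b"keyring payload", key, nonce)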