diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 85c52a45e3c7..502ac003c694 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,5 +1,4 @@ * @Chia-Network/required-reviewers /.github/**/* @Chia-Network/actions-reviewers /PRETTY_GOOD_PRACTICES.md @altendky @Chia-Network/required-reviewers -/pylintrc @altendky @Chia-Network/required-reviewers /tests/ether.py @altendky @Chia-Network/required-reviewers diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 53a3bbdd86e2..53b8c1209bef 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -21,5 +21,5 @@ jobs: - name: "Dependency Review" uses: actions/dependency-review-action@v4 with: - allow-dependencies-licenses: pkg:pypi/pylint, pkg:pypi/pyinstaller + allow-dependencies-licenses: pkg:pypi/pyinstaller deny-licenses: AGPL-1.0-only, AGPL-1.0-or-later, AGPL-1.0-or-later, AGPL-3.0-or-later, GPL-1.0-only, GPL-1.0-or-later, GPL-2.0-only, GPL-2.0-or-later, GPL-3.0-only, GPL-3.0-or-later diff --git a/.github/workflows/upload-pypi-source.yml b/.github/workflows/upload-pypi-source.yml index fa0f3c89de4f..7baee8f1b3d1 100644 --- a/.github/workflows/upload-pypi-source.yml +++ b/.github/workflows/upload-pypi-source.yml @@ -122,8 +122,6 @@ jobs: command: black --check --diff . 
- name: flake8 command: flake8 benchmarks build_scripts chia tools *.py - - name: pylint - command: pylint benchmarks build_scripts chia tools *.py - name: generated protocol tests command: | python3 -m chia._tests.util.build_network_protocol_files diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 063bfe9dc4e0..ebee9872709c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -106,3 +106,10 @@ repos: entry: ./activated.py flake8 language: system types: [python] + - repo: local + hooks: + - id: ruff + name: Ruff + entry: ./activated.py ruff check --fix + language: system + types: [python] diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5d6e871094f3..40a501a3f153 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -65,7 +65,7 @@ The [black library](https://black.readthedocs.io/en/stable/) is used as an autom The [flake8 library](https://readthedocs.org/projects/flake8/) helps ensure consistent style. The [Mypy library](https://mypy.readthedocs.io/en/stable/) is very useful for ensuring objects are of the correct type, so try to always add the type of the return value, and the type of local variables. The [isort library](https://isort.readthedocs.io) is used to sort, group and validate imports in all python files. -The [pylint library](https://pylint.pycqa.org/en/stable/) is used to further lint all python files. +The [Ruff library](https://docs.astral.sh) is used to further lint all of the python files If you want verbose logging for tests, edit the `tests/pytest.ini` file. 
diff --git a/benchmarks/block_ref.py b/benchmarks/block_ref.py index 640c668773c3..1b7b35520e69 100644 --- a/benchmarks/block_ref.py +++ b/benchmarks/block_ref.py @@ -98,5 +98,4 @@ def entry_point(db_path: Path) -> None: if __name__ == "__main__": - # pylint: disable = no-value-for-parameter entry_point() diff --git a/benchmarks/streamable.py b/benchmarks/streamable.py index 386da56a6917..366310c2d4d9 100644 --- a/benchmarks/streamable.py +++ b/benchmarks/streamable.py @@ -304,4 +304,4 @@ def get_bench_results() -> BenchmarkResults: if __name__ == "__main__": - run() # pylint: disable = no-value-for-parameter + run() diff --git a/chia/_tests/blockchain/blockchain_test_utils.py b/chia/_tests/blockchain/blockchain_test_utils.py index 8c2217e685a8..bf786e00f7f7 100644 --- a/chia/_tests/blockchain/blockchain_test_utils.py +++ b/chia/_tests/blockchain/blockchain_test_utils.py @@ -1,5 +1,6 @@ from __future__ import annotations +import asyncio from typing import Optional from chia_rs import BLSCache @@ -10,6 +11,7 @@ from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing from chia.types.full_block import FullBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.errors import Err from chia.util.ints import uint32, uint64 @@ -76,15 +78,16 @@ async def _validate_and_add_block( else: # validate_signatures must be False in order to trigger add_block() to # validate the signature. 
- pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), [block], blockchain.pool, {}, ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None results = pre_validation_results[0] if results.error is not None: diff --git a/chia/_tests/blockchain/test_blockchain.py b/chia/_tests/blockchain/test_blockchain.py index 55bbbe453a6d..51aab5cd9924 100644 --- a/chia/_tests/blockchain/test_blockchain.py +++ b/chia/_tests/blockchain/test_blockchain.py @@ -1,5 +1,6 @@ from __future__ import annotations +import asyncio import logging import random import time @@ -52,6 +53,7 @@ from chia.types.spend_bundle import SpendBundle from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.cpu import available_logical_cores from chia.util.errors import Err from chia.util.generator_tools import get_block_header @@ -1790,15 +1792,16 @@ async def test_pre_validation_fails_bad_blocks(self, empty_blockchain: Blockchai block_bad = recursive_replace( blocks[-1], "reward_chain_block.total_iters", blocks[-1].reward_chain_block.total_iters + 1 ) - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( empty_blockchain.constants, - empty_blockchain, + AugmentedBlockchain(empty_blockchain), [blocks[0], block_bad], empty_blockchain.pool, {}, ValidationState(ssi, difficulty, None), validate_signatures=True, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert res[0].error is None assert res[1].error is not None @@ -1817,15 +1820,16 @@ async def test_pre_validation( end_i = min(i + 
n_at_a_time, len(blocks)) blocks_to_validate = blocks[i:end_i] start_pv = time.time() - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( empty_blockchain.constants, - empty_blockchain, + AugmentedBlockchain(empty_blockchain), blocks_to_validate, empty_blockchain.pool, {}, ValidationState(ssi, difficulty, None), validate_signatures=True, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) end_pv = time.time() times_pv.append(end_pv - start_pv) assert res is not None @@ -1924,15 +1928,16 @@ async def test_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) block = blocks[-1] @@ -2050,15 +2055,16 @@ async def test_timelock_conditions( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None block = blocks[-1] fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) @@ -2133,15 +2139,16 @@ async def test_aggsig_garbage( ) ssi = 
b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) # Ignore errors from pre-validation, we are testing block_body_validation repl_preval_results = replace(pre_validation_results[0], error=None, required_iters=uint64(1)) block = blocks[-1] @@ -2261,15 +2268,16 @@ async def test_ephemeral_timelock( ) ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None block = blocks[-1] fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash) @@ -2627,15 +2635,16 @@ async def test_cost_exceeds_max( ) )[1] assert err in [Err.BLOCK_COST_EXCEEDS_MAX] - results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [blocks[-1]], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None assert Err(results[0].error) == Err.BLOCK_COST_EXCEEDS_MAX @@ -2808,7 +2817,7 @@ async def test_max_coin_amount(self, db_version: int, bt: BlockTools) -> None: # wt: WalletTool = bt_2.get_pool_wallet_tool() - # 
condition_dict: Dict[ConditionOpcode, List[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} + # condition_dict: dict[ConditionOpcode, list[ConditionWithArgs]] = {ConditionOpcode.CREATE_COIN: []} # output = ConditionWithArgs(ConditionOpcode.CREATE_COIN, [bt_2.pool_ph, int_to_bytes(2 ** 64)]) # condition_dict[ConditionOpcode.CREATE_COIN].append(output) @@ -3233,15 +3242,16 @@ async def test_invalid_agg_sig(self, empty_blockchain: Blockchain, bt: BlockTool # Bad signature also fails in prevalidation ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - preval_results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [last_block], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + preval_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert preval_results is not None assert preval_results[0].error == Err.BAD_AGGREGATE_SIGNATURE.value @@ -3352,15 +3362,16 @@ async def test_long_reorg( print(f"pre-validating {len(blocks)} blocks") ssi = b.constants.SUB_SLOT_ITERS_STARTING diff = b.constants.DIFFICULTY_STARTING - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), blocks, b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) for i, block in enumerate(blocks): if block.height != 0 and len(block.finished_sub_slots) > 0: if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None: @@ -3919,29 +3930,29 @@ async def test_reorg_flip_flop(empty_blockchain: Blockchain, bt: BlockTools) -> block1, block2 = b1, b2 counter += 1 - preval: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await 
pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [block1], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) - + preval: list[PreValidationResult] = list(await asyncio.gather(*futures)) fork_info = ForkInfo(block1.height - 1, block1.height - 1, block1.prev_header_hash) _, err, _ = await b.add_block(block1, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None - preval = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( b.constants, - b, + AugmentedBlockchain(b), [block2], b.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) - + preval = list(await asyncio.gather(*futures)) fork_info = ForkInfo(block2.height - 1, block2.height - 1, block2.prev_header_hash) _, err, _ = await b.add_block(block2, preval[0], None, sub_slot_iters=ssi, fork_info=fork_info) assert err is None @@ -3967,15 +3978,16 @@ async def test_get_tx_peak(default_400_blocks: list[FullBlock], empty_blockchain test_blocks = default_400_blocks[:100] ssi = bc.constants.SUB_SLOT_ITERS_STARTING diff = bc.constants.DIFFICULTY_STARTING - res = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( bc.constants, - bc, + AugmentedBlockchain(bc), test_blocks, bc.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + res: list[PreValidationResult] = list(await asyncio.gather(*futures)) last_tx_block_record = None for b, prevalidation_res in zip(test_blocks, res): diff --git a/chia/_tests/build-init-files.py b/chia/_tests/build-init-files.py index 67b5840eec04..38f3269d4009 100755 --- a/chia/_tests/build-init-files.py +++ b/chia/_tests/build-init-files.py @@ -89,4 +89,4 @@ def command(verbose, root_str): raise click.ClickException("At least one __init__.py created or not a regular file") -command() # pylint: disable=no-value-for-parameter +command() diff --git a/chia/_tests/clvm/test_condition_codes.py 
b/chia/_tests/clvm/test_condition_codes.py index 8d20e4d387c4..af75f70f0b30 100644 --- a/chia/_tests/clvm/test_condition_codes.py +++ b/chia/_tests/clvm/test_condition_codes.py @@ -9,5 +9,5 @@ def test_condition_codes_is_complete() -> None: condition_codes_path = importlib_resources.files("chia.wallet.puzzles").joinpath("condition_codes.clib") contents = condition_codes_path.read_text(encoding="utf-8") - for name, value in ConditionOpcode.__members__.items(): - assert f"(defconstant {name} {int_from_bytes(value)})" in contents + for opcode in ConditionOpcode: + assert f"(defconstant {opcode.name} {int_from_bytes(opcode.value)})" in contents diff --git a/chia/_tests/cmds/cmd_test_utils.py b/chia/_tests/cmds/cmd_test_utils.py index 4a4acc7983eb..7ddf2571d7b2 100644 --- a/chia/_tests/cmds/cmd_test_utils.py +++ b/chia/_tests/cmds/cmd_test_utils.py @@ -440,7 +440,7 @@ def run_cli_command(capsys: object, chia_root: Path, command_list: list[str]) -> argv_temp = sys.argv try: sys.argv = ["chia", "--root-path", str(chia_root)] + command_list - chia_cli() # pylint: disable=no-value-for-parameter + chia_cli() except SystemExit as e: if e.code != 0: exited_cleanly = False diff --git a/chia/_tests/cmds/test_show.py b/chia/_tests/cmds/test_show.py index f2fe27545f20..3916ce8f6dbc 100644 --- a/chia/_tests/cmds/test_show.py +++ b/chia/_tests/cmds/test_show.py @@ -79,7 +79,7 @@ async def get_block(self, header_hash: bytes32) -> Optional[FullBlock]: return full_block -RPC_CLIENT_TO_USE = ShowFullNodeRpcClient() # pylint: disable=no-value-for-parameter +RPC_CLIENT_TO_USE = ShowFullNodeRpcClient() def test_chia_show(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: diff --git a/chia/_tests/cmds/wallet/test_coins.py b/chia/_tests/cmds/wallet/test_coins.py index ceca5f538753..4839fb900fcf 100644 --- a/chia/_tests/cmds/wallet/test_coins.py +++ b/chia/_tests/cmds/wallet/test_coins.py @@ -25,7 +25,7 @@ def test_coins_get_info(capsys: object, get_test_cli_clients: 
tuple[TestRpcClien # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "coins", "list", FINGERPRINT_ARG, "-i1", "-u"] # these are various things that should be in the output @@ -68,7 +68,7 @@ async def combine_coins( self.add_to_log("combine_coins", (args, tx_config, timelock_info)) return CombineCoinsResponse([STD_UTX], [STD_TX]) - inst_rpc_client = CoinsCombineRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CoinsCombineRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client assert sum(coin.amount for coin in STD_TX.removals) < 500_000_000_000 command_args = [ @@ -173,7 +173,7 @@ async def get_coin_records_by_names( else: return [] - inst_rpc_client = CoinsSplitRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CoinsSplitRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_coin_id = test_coin.name() command_args = [ diff --git a/chia/_tests/cmds/wallet/test_dao.py b/chia/_tests/cmds/wallet/test_dao.py index 6f6292459401..68a2e6a2e6d3 100644 --- a/chia/_tests/cmds/wallet/test_dao.py +++ b/chia/_tests/cmds/wallet/test_dao.py @@ -66,7 +66,7 @@ async def create_new_dao_wallet( } ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "dao", @@ -175,7 +175,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: valid_times=parse_timelock_info(tuple()), ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client get_id_args = ["dao", "get_id", FINGERPRINT_ARG, "-i 2"] @@ -345,7 +345,7 @@ async def get_transaction(self, transaction_id: bytes32) -> 
TransactionRecord: ) # List all proposals - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client list_args = ["dao", "list_proposals", FINGERPRINT_ARG, "-i 2"] # these are various things that should be in the output @@ -549,7 +549,7 @@ async def get_transaction(self, transaction_id: bytes32) -> TransactionRecord: valid_times=parse_timelock_info(tuple()), ) - inst_rpc_client = DAOCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DAOCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client lockup_args = ["dao", "lockup_coins", FINGERPRINT_ARG, "-i 2", "-a", "1000", "-m 0.1", "--reuse"] lockup_asserts = ["Transaction submitted to nodes"] diff --git a/chia/_tests/cmds/wallet/test_did.py b/chia/_tests/cmds/wallet/test_did.py index a26d797ee53b..be16113e39ef 100644 --- a/chia/_tests/cmds/wallet/test_did.py +++ b/chia/_tests/cmds/wallet/test_did.py @@ -45,7 +45,7 @@ async def create_new_did_wallet( ) return {"wallet_id": 3, "my_did": "did:chia:testdid123456"} - inst_rpc_client = DidCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidCreateRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -78,7 +78,7 @@ def test_did_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_id = encode_puzzle_hash(get_bytes32(1), "did:chia:") message = b"hello did world!!" 
@@ -106,7 +106,7 @@ async def did_set_wallet_name(self, wallet_id: int, name: str) -> dict[str, Unio self.add_to_log("did_set_wallet_name", (wallet_id, name)) return {} - inst_rpc_client = DidSetNameRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidSetNameRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 did_name = "testdid" @@ -129,7 +129,7 @@ async def get_did_id(self, wallet_id: int) -> dict[str, str]: self.add_to_log("get_did_id", (wallet_id,)) return {"my_did": encode_puzzle_hash(get_bytes32(1), "did:chia:"), "coin_id": get_bytes32(2).hex()} - inst_rpc_client = DidGetDidRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidGetDidRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 expected_did = encode_puzzle_hash(get_bytes32(1), "did:chia:") @@ -165,7 +165,7 @@ async def get_did_info(self, coin_id: str, latest: bool) -> dict[str, object]: } return response - inst_rpc_client = DidGetDetailsRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidGetDetailsRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_coin_id_hex = get_bytes32(1).hex() command_args = ["wallet", "did", "get_details", FINGERPRINT_ARG, "--coin_id", did_coin_id_hex] @@ -207,7 +207,7 @@ async def update_did_metadata( self.add_to_log("update_did_metadata", (wallet_id, metadata, tx_config, push, timelock_info)) return DIDUpdateMetadataResponse([STD_UTX], [STD_TX], WalletSpendBundle([], G2Element()), uint32(wallet_id)) - inst_rpc_client = DidUpdateMetadataRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidUpdateMetadataRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 json_mdata = '{"test": true}' @@ -252,7 +252,7 @@ async def find_lost_did( self.add_to_log("find_lost_did", (coin_id, recovery_list_hash, metadata, num_verification)) return {"success": True, "latest_coin_id": get_bytes32(2).hex()} - inst_rpc_client = 
DidFindLostRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidFindLostRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client c_id = get_bytes32(1) json_mdata = '{"test": true}' @@ -291,7 +291,7 @@ async def did_message_spend( self.add_to_log("did_message_spend", (wallet_id, tx_config, extra_conditions, push, timelock_info)) return DIDMessageSpendResponse([STD_UTX], [STD_TX], WalletSpendBundle([], G2Element())) - inst_rpc_client = DidMessageSpendRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidMessageSpendRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 c_announcements = [get_bytes32(1), get_bytes32(2)] @@ -357,7 +357,7 @@ async def did_transfer_did( STD_TX.name, ) - inst_rpc_client = DidTransferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = DidTransferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client w_id = 3 t_address = encode_puzzle_hash(get_bytes32(1), "xch") diff --git a/chia/_tests/cmds/wallet/test_nft.py b/chia/_tests/cmds/wallet/test_nft.py index 5d9f74bebaaf..614e33074d2a 100644 --- a/chia/_tests/cmds/wallet/test_nft.py +++ b/chia/_tests/cmds/wallet/test_nft.py @@ -36,7 +36,7 @@ async def create_new_nft_wallet(self, did_id: str, name: Optional[str] = None) - self.add_to_log("create_new_nft_wallet", (did_id, name)) return {"wallet_id": 4} - inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTCreateRpcClient() did_id = encode_puzzle_hash(get_bytes32(2), "did:chia:") test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "nft", "create", FINGERPRINT_ARG, "-ntest", "--did-id", did_id] @@ -54,7 +54,7 @@ def test_nft_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcCli # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = 
inst_rpc_client did_id = encode_puzzle_hash(get_bytes32(1), "nft") message = b"hello nft world!!" @@ -132,7 +132,7 @@ async def mint_nft( bytes32.zeros.hex(), ) - inst_rpc_client = NFTCreateRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTCreateRpcClient() target_addr = encode_puzzle_hash(get_bytes32(2), "xch") test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -209,7 +209,7 @@ async def add_uri_to_nft( self.add_to_log("add_uri_to_nft", (wallet_id, nft_coin_id, key, uri, fee, tx_config, push, timelock_info)) return NFTAddURIResponse([STD_UTX], [STD_TX], uint32(wallet_id), WalletSpendBundle([], G2Element())) - inst_rpc_client = NFTAddUriRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTAddUriRpcClient() nft_coin_id = get_bytes32(2).hex() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -275,7 +275,7 @@ async def transfer_nft( WalletSpendBundle([], G2Element()), ) - inst_rpc_client = NFTTransferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTTransferRpcClient() nft_coin_id = get_bytes32(2).hex() target_address = encode_puzzle_hash(get_bytes32(2), "xch") test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -352,7 +352,7 @@ async def list_nfts(self, wallet_id: int, num: int = 50, start_index: int = 0) - ) return {"nft_list": nft_list} - inst_rpc_client = NFTListRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTListRpcClient() launcher_ids = [bytes32([i] * 32).hex() for i in range(50, 60)] test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ @@ -401,7 +401,7 @@ async def set_nft_did( WalletSpendBundle([], G2Element()), ) - inst_rpc_client = NFTSetDidRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NFTSetDidRpcClient() nft_coin_id = get_bytes32(2).hex() did_id = encode_puzzle_hash(get_bytes32(3), "did:chia:") test_rpc_clients.wallet_rpc_client = inst_rpc_client @@ -447,7 +447,7 
@@ def test_nft_get_info(capsys: object, get_test_cli_clients: tuple[TestRpcClients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() nft_coin_id = get_bytes32(2).hex() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ diff --git a/chia/_tests/cmds/wallet/test_notifications.py b/chia/_tests/cmds/wallet/test_notifications.py index 538015f0c1e6..ed9b9931f9e3 100644 --- a/chia/_tests/cmds/wallet/test_notifications.py +++ b/chia/_tests/cmds/wallet/test_notifications.py @@ -40,7 +40,7 @@ def __init__(self, name: str) -> None: return cast(TransactionRecord, FakeTransactionRecord(get_bytes32(2).hex())) - inst_rpc_client = NotificationsSendRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsSendRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_ph = get_bytes32(1) target_addr = encode_puzzle_hash(target_ph, "xch") @@ -82,7 +82,7 @@ async def get_notifications(self, request: GetNotifications) -> GetNotifications [Notification(get_bytes32(1), bytes("hello", "utf8"), uint64(1000000000), uint32(50))] ) - inst_rpc_client = NotificationsGetRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsGetRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client target_ph = get_bytes32(1) command_args = [ @@ -114,7 +114,7 @@ async def delete_notifications(self, ids: Optional[list[bytes32]] = None) -> boo self.add_to_log("delete_notifications", (ids,)) return True - inst_rpc_client = NotificationsDeleteRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = NotificationsDeleteRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "notifications", "delete", FINGERPRINT_ARG, "--all"] # these are various things that should be in the output diff --git a/chia/_tests/cmds/wallet/test_vcs.py b/chia/_tests/cmds/wallet/test_vcs.py index 
f8d1aacdbfac..cef9ac5dcde4 100644 --- a/chia/_tests/cmds/wallet/test_vcs.py +++ b/chia/_tests/cmds/wallet/test_vcs.py @@ -55,7 +55,7 @@ async def vc_mint( ), ) - inst_rpc_client = VcsMintRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsMintRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client did_bytes = get_bytes32(1) did_id = encode_puzzle_hash(did_bytes, "did:chia:") @@ -108,7 +108,7 @@ def __getattr__(self, item: str) -> Any: records = [cast(VCRecord, FakeVC())] return records, proofs - inst_rpc_client = VcsGetRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsGetRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "vcs", "get", FINGERPRINT_ARG, "-s10", "-c10"] # these are various things that should be in the output @@ -144,7 +144,7 @@ async def vc_spend( ) return VCSpendResponse([STD_UTX], [STD_TX]) - inst_rpc_client = VcsUpdateProofsRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsUpdateProofsRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client vc_bytes = get_bytes32(1) target_ph = get_bytes32(2) @@ -196,7 +196,7 @@ async def vc_add_proofs(self, proofs: dict[str, Any]) -> None: self.add_to_log("vc_add_proofs", (proofs,)) return None - inst_rpc_client = VcsAddProofRevealRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsAddProofRevealRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client new_proof = "test_proof" command_args = ["wallet", "vcs", "add_proof_reveal", FINGERPRINT_ARG, f"-p{new_proof}"] @@ -220,7 +220,7 @@ async def vc_get_proofs_for_root(self, root: bytes32) -> dict[str, Any]: self.add_to_log("vc_get_proofs_for_root", (root,)) return {"test_proof": "1", "test_proof2": "1"} - inst_rpc_client = VcsGetProofsForRootRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsGetProofsForRootRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client proof_hash 
= get_bytes32(1) command_args = ["wallet", "vcs", "get_proofs_for_root", FINGERPRINT_ARG, f"-r{proof_hash.hex()}"] @@ -261,7 +261,7 @@ async def vc_revoke( self.add_to_log("vc_revoke", (vc_parent_id, tx_config, fee, push, timelock_info)) return VCRevokeResponse([STD_UTX], [STD_TX]) - inst_rpc_client = VcsRevokeRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsRevokeRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client parent_id = get_bytes32(1) vc_id = get_bytes32(2) @@ -330,7 +330,7 @@ async def crcat_approve_pending( ) return [STD_TX] - inst_rpc_client = VcsApproveRCATSRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = VcsApproveRCATSRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client wallet_id = uint32(2) command_args = [ diff --git a/chia/_tests/cmds/wallet/test_wallet.py b/chia/_tests/cmds/wallet/test_wallet.py index 64407d2efe88..ebd1a51d6bd8 100644 --- a/chia/_tests/cmds/wallet/test_wallet.py +++ b/chia/_tests/cmds/wallet/test_wallet.py @@ -60,7 +60,7 @@ def test_get_transaction(capsys: object, get_test_cli_clients: tuple[TestRpcClients, Path]) -> None: test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose command_args = ["wallet", "get_transaction", WALLET_ID_ARG, "-tx", bytes32_hexstr] @@ -149,7 +149,7 @@ async def get_coin_records(self, request: GetCoinRecords) -> dict[str, Any]: "total_count": 1, } - inst_rpc_client = GetTransactionsWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetTransactionsWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose command_args = [ @@ -269,7 +269,7 @@ async def get_connections( } ] - inst_rpc_client = ShowRpcClient() # pylint: 
disable=no-value-for-parameter + inst_rpc_client = ShowRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "show", FINGERPRINT_ARG] assert_list = [ @@ -376,7 +376,7 @@ async def cat_spend( ) return CATSpendResponse([STD_UTX], [STD_TX], STD_TX, STD_TX.name) - inst_rpc_client = SendWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = SendWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose addr = encode_puzzle_hash(get_bytes32(3), "xch") @@ -479,7 +479,7 @@ async def get_next_address(self, wallet_id: int, new_address: bool) -> str: return encode_puzzle_hash(get_bytes32(3), "xch") return encode_puzzle_hash(get_bytes32(4), "xch") - inst_rpc_client = GetAddressWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetAddressWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # get output with all options but verbose addr1 = encode_puzzle_hash(get_bytes32(3), "xch") @@ -526,7 +526,7 @@ async def spend_clawback_coins( ], } - inst_rpc_client = ClawbackWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = ClawbackWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client tx_ids = [get_bytes32(3), get_bytes32(4), get_bytes32(5)] r_tx_ids_hex = [get_bytes32(6).hex(), get_bytes32(7).hex(), get_bytes32(8).hex()] @@ -560,7 +560,7 @@ async def delete_unconfirmed_transactions(self, wallet_id: int) -> None: self.add_to_log("delete_unconfirmed_transactions", (wallet_id,)) return None - inst_rpc_client = UnconfirmedTxRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = UnconfirmedTxRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -586,7 +586,7 @@ async def get_current_derivation_index(self) -> str: self.add_to_log("get_current_derivation_index", ()) return str(520) - inst_rpc_client = GetDerivationIndexRpcClient() # 
pylint: disable=no-value-for-parameter + inst_rpc_client = GetDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -605,7 +605,7 @@ def test_sign_message(capsys: object, get_test_cli_clients: tuple[TestRpcClients test_rpc_clients, root_dir = get_test_cli_clients # set RPC Client - inst_rpc_client = TestWalletRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TestWalletRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client xch_addr = encode_puzzle_hash(get_bytes32(1), "xch") message = b"hello world" @@ -633,7 +633,7 @@ async def extend_derivation_index(self, index: int) -> str: self.add_to_log("extend_derivation_index", (index,)) return str(index) - inst_rpc_client = UpdateDerivationIndexRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = UpdateDerivationIndexRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client index = 600 command_args = ["wallet", "update_derivation_index", FINGERPRINT_ARG, "--index", str(index)] @@ -657,7 +657,7 @@ async def set_cat_name(self, wallet_id: int, name: str) -> None: self.add_to_log("set_cat_name", (wallet_id, name)) return None # we don't need to do anything here - inst_rpc_client = AddTokenRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = AddTokenRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = ["wallet", "add_token", FINGERPRINT_ARG, "-nexamplecat"] assert_list = [f"Successfully renamed test1 with wallet_id 2 on key {FINGERPRINT} to examplecat"] @@ -765,7 +765,7 @@ async def create_offer_for_ids( return CreateOfferForIDsResponse([STD_UTX], [STD_TX], created_offer, trade_offer) - inst_rpc_client = MakeOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = MakeOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client request_cat_id = get_bytes32(2) request_nft_id = get_bytes32(2) @@ -913,7 +913,7 @@ async def get_all_offers( 
records.append(trade_offer) return records - inst_rpc_client = GetOffersRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = GetOffersRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", @@ -999,7 +999,7 @@ async def take_offer( ), ) - inst_rpc_client = TakeOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = TakeOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client # these are various things that should be in the output cat1 = bytes32.from_hexstr("fd6a341ed39c05c31157d5bfea395a0e142398ced24deea1e82f836d7ec2909c") @@ -1084,7 +1084,7 @@ async def cancel_offer( self.add_to_log("cancel_offer", (trade_id, tx_config, fee, secure, push, timelock_info)) return CancelOfferResponse([STD_UTX], [STD_TX]) - inst_rpc_client = CancelOfferRpcClient() # pylint: disable=no-value-for-parameter + inst_rpc_client = CancelOfferRpcClient() test_rpc_clients.wallet_rpc_client = inst_rpc_client command_args = [ "wallet", diff --git a/chia/_tests/core/full_node/test_full_node.py b/chia/_tests/core/full_node/test_full_node.py index 3dff7aeed53c..b1a357839b92 100644 --- a/chia/_tests/core/full_node/test_full_node.py +++ b/chia/_tests/core/full_node/test_full_node.py @@ -24,7 +24,7 @@ from chia._tests.util.setup_nodes import SimulatorsAndWalletsServices from chia._tests.util.time_out_assert import time_out_assert, time_out_assert_custom_interval, time_out_messages from chia.consensus.block_body_validation import ForkInfo -from chia.consensus.multiprocess_validation import pre_validate_blocks_multiprocessing +from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing from chia.consensus.pot_iterations import is_overflow_block from chia.full_node.full_node import WalletUpdate from chia.full_node.full_node_api import FullNodeAPI @@ -63,6 +63,7 @@ from chia.types.spend_bundle import SpendBundle, estimate_fees from chia.types.unfinished_block import 
UnfinishedBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.errors import ConsensusError, Err from chia.util.hash import std_hash from chia.util.ints import uint8, uint16, uint32, uint64, uint128 @@ -430,15 +431,16 @@ async def check_transaction_confirmed(transaction) -> bool: for reorg_block in reog_blocks[:r]: await _validate_and_add_block_no_error(blockchain, reorg_block, fork_info=fork_info) for i in range(1, height): - results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), all_blocks[:i], blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None for result in results: assert result.error is None @@ -448,15 +450,16 @@ async def check_transaction_confirmed(transaction) -> bool: for block in all_blocks[:r]: await _validate_and_add_block_no_error(blockchain, block, fork_info=fork_info) for i in range(1, height): - results = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( blockchain.constants, - blockchain, + AugmentedBlockchain(blockchain), all_blocks[:i], blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=False, ) + results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert results is not None for result in results: assert result.error is None diff --git a/chia/_tests/farmer_harvester/test_farmer.py b/chia/_tests/farmer_harvester/test_farmer.py index 0cb53e22fa94..ab1ccec92354 100644 --- a/chia/_tests/farmer_harvester/test_farmer.py +++ b/chia/_tests/farmer_harvester/test_farmer.py @@ -348,7 +348,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item 
format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -380,7 +380,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -411,7 +411,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -442,7 +442,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -473,7 +473,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. 
"points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -504,7 +504,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -535,7 +535,7 @@ def test_increment_pool_stats(case: IncrementPoolStatsCase) -> None: expected_pool_stats={ "points_found_since_start": 1, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. "points_found_24h": [1], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], @@ -768,7 +768,7 @@ def override_pool_state(overrides: dict[str, Any]) -> dict[str, Any]: pool_state = { "points_found_since_start": 0, # Original item format here is (timestamp, value) but we'll ignore timestamp part - # so every `xxx_24h` item in this dict will be List[Any]. + # so every `xxx_24h` item in this dict will be list[Any]. 
"points_found_24h": [], "points_acknowledged_since_start": 0, "points_acknowledged_24h": [], diff --git a/chia/_tests/farmer_harvester/test_third_party_harvesters.py b/chia/_tests/farmer_harvester/test_third_party_harvesters.py index 6b102f81f152..334f753580b9 100644 --- a/chia/_tests/farmer_harvester/test_third_party_harvesters.py +++ b/chia/_tests/farmer_harvester/test_third_party_harvesters.py @@ -41,6 +41,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import UnresolvedPeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.bech32m import decode_puzzle_hash from chia.util.hash import std_hash from chia.util.ints import uint8, uint32, uint64 @@ -437,15 +438,16 @@ async def add_test_blocks_into_full_node(blocks: list[FullBlock], full_node: Ful prev_ses_block = curr new_slot = len(block.finished_sub_slots) > 0 ssi, diff = get_next_sub_slot_iters_and_difficulty(full_node.constants, new_slot, prev_b, full_node.blockchain) - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( full_node.blockchain.constants, - full_node.blockchain, + AugmentedBlockchain(full_node.blockchain), blocks, full_node.blockchain.pool, {}, ValidationState(ssi, diff, prev_ses_block), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None and len(pre_validation_results) == len(blocks) for i in range(len(blocks)): block = blocks[i] diff --git a/chia/_tests/process_junit.py b/chia/_tests/process_junit.py index 8442c88464eb..abf65408ca16 100644 --- a/chia/_tests/process_junit.py +++ b/chia/_tests/process_junit.py @@ -345,5 +345,4 @@ def output_time_out_assert( if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/chia/_tests/util/full_sync.py 
b/chia/_tests/util/full_sync.py index 2427b7749e2d..a8d48573290f 100644 --- a/chia/_tests/util/full_sync.py +++ b/chia/_tests/util/full_sync.py @@ -28,6 +28,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import load_config from chia.util.ints import uint16 @@ -212,6 +213,7 @@ async def run_sync_test( fork_height = block_batch[0].height - 1 header_hash = block_batch[0].prev_header_hash success, summary, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, header_hash), diff --git a/chia/_tests/util/gen_ssl_certs.py b/chia/_tests/util/gen_ssl_certs.py index b15ccf97eb9d..d80b27201304 100644 --- a/chia/_tests/util/gen_ssl_certs.py +++ b/chia/_tests/util/gen_ssl_certs.py @@ -43,9 +43,6 @@ def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: private_ca_key: Optional[bytes] = None capture_cert_and_key = True - print("from typing import Tuple") - print() - make_ca_cert(Path("SSL_TEST_PRIVATE_CA_CRT"), Path("SSL_TEST_PRIVATE_CA_KEY")) capture_cert_and_key = False @@ -92,11 +89,11 @@ def patched_write_ssl_cert_and_key(cert_path: Path, cert_data: bytes, key_path: append_str = "" if suffix == "" else f"_{suffix}" print( - f"SSL_TEST_PRIVATE_CA_CERT_AND_KEY{append_str}: Tuple[bytes, bytes] = " + f"SSL_TEST_PRIVATE_CA_CERT_AND_KEY{append_str}: tuple[bytes, bytes] = " "(SSL_TEST_PRIVATE_CA_CRT, SSL_TEST_PRIVATE_CA_KEY)" ) print() - print(f"SSL_TEST_NODE_CERTS_AND_KEYS{append_str}: Dict[str, Dict[str, Dict[str, bytes]]] = {{") + print(f"SSL_TEST_NODE_CERTS_AND_KEYS{append_str}: dict[str, dict[str, dict[str, bytes]]] = {{") for node_name, cert_type_dict in node_certs_and_keys.items(): print(f' "{node_name}": {{') for cert_type, cert_dict in cert_type_dict.items(): diff --git 
a/chia/_tests/util/misc.py b/chia/_tests/util/misc.py index 751d9aacaa08..3ea473c59479 100644 --- a/chia/_tests/util/misc.py +++ b/chia/_tests/util/misc.py @@ -348,7 +348,7 @@ def __exit__( label=self.label, ) - ether.record_property( # pylint: disable=E1102 + ether.record_property( data.tag, json.dumps(data.marshal(), ensure_ascii=True, sort_keys=True), ) diff --git a/chia/_tests/util/time_out_assert.py b/chia/_tests/util/time_out_assert.py index a777c802675e..b1b99917077f 100644 --- a/chia/_tests/util/time_out_assert.py +++ b/chia/_tests/util/time_out_assert.py @@ -125,7 +125,7 @@ async def time_out_assert_custom_interval( timed_out=timed_out, ) - ether.record_property( # pylint: disable=E1102 + ether.record_property( data.tag, json.dumps(data.marshal(), ensure_ascii=True, sort_keys=True), ) diff --git a/chia/_tests/wallet/rpc/test_wallet_rpc.py b/chia/_tests/wallet/rpc/test_wallet_rpc.py index db4c17f9eec1..e2aad760b6e6 100644 --- a/chia/_tests/wallet/rpc/test_wallet_rpc.py +++ b/chia/_tests/wallet/rpc/test_wallet_rpc.py @@ -281,7 +281,7 @@ async def assert_push_tx_error(node_rpc: FullNodeRpcClient, tx: TransactionRecor try: await node_rpc.push_tx(spend_bundle) except ValueError as error: - error_string = error.args[0]["error"] # noqa: # pylint: disable=E1126 + error_string = error.args[0]["error"] if error_string.find("ASSERT_ANNOUNCE_CONSUMED_FAILED") == -1: raise ValueError from error @@ -413,7 +413,7 @@ async def test_push_transactions(wallet_rpc_environment: WalletRpcTestEnvironmen ).signed_tx resp_client = await client.push_transactions( - PushTransactions(transactions=[tx], fee=uint64(10)), # pylint: disable=unexpected-keyword-arg + PushTransactions(transactions=[tx], fee=uint64(10)), DEFAULT_TX_CONFIG, ) resp = await client.fetch( diff --git a/chia/_tests/wallet/sync/test_wallet_sync.py b/chia/_tests/wallet/sync/test_wallet_sync.py index 76055d61da66..80ba712e9e1c 100644 --- a/chia/_tests/wallet/sync/test_wallet_sync.py +++ 
b/chia/_tests/wallet/sync/test_wallet_sync.py @@ -44,6 +44,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.hash import std_hash from chia.util.ints import uint32, uint64, uint128 from chia.wallet.nft_wallet.nft_wallet import NFTWallet @@ -364,6 +365,7 @@ async def test_long_sync_wallet( ) fork_height = blocks_reorg[-num_blocks - 10].height - 1 await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), blocks_reorg[-num_blocks - 10 : -1], PeerInfo("0.0.0.0", 0), ForkInfo(fork_height, fork_height, blocks_reorg[-num_blocks - 10].prev_header_hash), @@ -481,6 +483,7 @@ async def test_wallet_reorg_get_coinbase( full_node.constants, True, block_record, full_node.blockchain ) await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), blocks_reorg_2[-44:], PeerInfo("0.0.0.0", 0), ForkInfo(blocks_reorg_2[-45].height, blocks_reorg_2[-45].height, blocks_reorg_2[-45].header_hash), diff --git a/chia/_tests/wallet/test_singleton_lifecycle_fast.py b/chia/_tests/wallet/test_singleton_lifecycle_fast.py index 09e2318508c1..746571466dc7 100644 --- a/chia/_tests/wallet/test_singleton_lifecycle_fast.py +++ b/chia/_tests/wallet/test_singleton_lifecycle_fast.py @@ -56,7 +56,7 @@ def satisfies_hint(obj: T, type_hint: type[T]) -> bool: if not isinstance(obj, origin): return False if len(args) > 0: - # Tuple[T, ...] gets handled just like List[T] + # tuple[T, ...] 
gets handled just like list[T] if origin is list or (origin is tuple and args[-1] is Ellipsis): object_hint_pairs.extend((item, args[0]) for item in obj) elif origin is tuple: diff --git a/chia/_tests/wallet/test_transaction_store.py b/chia/_tests/wallet/test_transaction_store.py index 13378db63305..6264a66a2e31 100644 --- a/chia/_tests/wallet/test_transaction_store.py +++ b/chia/_tests/wallet/test_transaction_store.py @@ -37,11 +37,11 @@ [coin_2, coin_3], # additions [coin_1], # removals uint32(1), # wallet_id - [], # List[Tuple[str, uint8, Optional[str]]] sent_to + [], # list[tuple[str, uint8, Optional[str]]] sent_to bytes32(bytes32.random(module_seeded_random)), # trade_id uint32(TransactionType.OUTGOING_TX), # type bytes32(bytes32.random(module_seeded_random)), # name - [], # List[Tuple[bytes32, List[bytes]]] memos + [], # list[tuple[bytes32, list[bytes]]] memos ConditionValidTimes(), ) diff --git a/chia/_tests/wallet/test_wallet_state_manager.py b/chia/_tests/wallet/test_wallet_state_manager.py index 481173c0bd51..06332c1f0834 100644 --- a/chia/_tests/wallet/test_wallet_state_manager.py +++ b/chia/_tests/wallet/test_wallet_state_manager.py @@ -226,7 +226,7 @@ async def test_confirming_txs_not_ours(wallet_environments: WalletTestFramework) ) await env_2.rpc_client.push_transactions( - PushTransactions( # pylint: disable=unexpected-keyword-arg + PushTransactions( transactions=action_scope.side_effects.transactions, sign=False, ), diff --git a/chia/cmds/chia.py b/chia/cmds/chia.py index cf67f4f843a1..3c7291a72de1 100644 --- a/chia/cmds/chia.py +++ b/chia/cmds/chia.py @@ -136,7 +136,7 @@ def run_daemon_cmd(ctx: click.Context, wait_for_unlock: bool) -> None: def main() -> None: import chia.cmds.signer # noqa - cli() # pylint: disable=no-value-for-parameter + cli() if __name__ == "__main__": diff --git a/chia/cmds/cmd_classes.py b/chia/cmds/cmd_classes.py index 1aedf7ee963d..0771c10247c7 100644 --- a/chia/cmds/cmd_classes.py +++ b/chia/cmds/cmd_classes.py @@ -41,7 
+41,7 @@ def option(*param_decls: str, **kwargs: Any) -> Any: else: default_default = MISSING - return field( # pylint: disable=invalid-field-call + return field( metadata=dict( option_args=dict( param_decls=tuple(param_decls), @@ -268,7 +268,7 @@ class WalletClientInfo: @command_helper class NeedsWalletRPC: - context: Context = field(default_factory=dict) # pylint: disable=invalid-field-call + context: Context = field(default_factory=dict) client_info: Optional[WalletClientInfo] = None wallet_rpc_port: Optional[int] = option( "-wp", @@ -294,7 +294,7 @@ async def wallet_rpc(self, **kwargs: Any) -> AsyncIterator[WalletClientInfo]: yield self.client_info else: if "root_path" not in kwargs: - kwargs["root_path"] = self.context["root_path"] # pylint: disable=unsubscriptable-object + kwargs["root_path"] = self.context["root_path"] async with get_wallet_client(self.wallet_rpc_port, self.fingerprint, **kwargs) as ( wallet_client, fp, diff --git a/chia/cmds/signer.py b/chia/cmds/signer.py index 85d90337bdc2..56b49e7982d8 100644 --- a/chia/cmds/signer.py +++ b/chia/cmds/signer.py @@ -151,7 +151,7 @@ class SPIn(_SPTranslation): def read_sp_input(self, typ: type[_T_ClvmStreamable]) -> list[_T_ClvmStreamable]: final_list: list[_T_ClvmStreamable] = [] - for filename in self.signer_protocol_input: # pylint: disable=not-an-iterable + for filename in self.signer_protocol_input: with open(Path(filename), "rb") as file: final_list.append( byte_deserialize_clvm_streamable( diff --git a/chia/cmds/wallet_funcs.py b/chia/cmds/wallet_funcs.py index e1c8da215f79..da407b33bfa7 100644 --- a/chia/cmds/wallet_funcs.py +++ b/chia/cmds/wallet_funcs.py @@ -423,7 +423,7 @@ async def make_offer( else: offer_dict: dict[Union[uint32, str], int] = {} driver_dict: dict[str, Any] = {} - printable_dict: dict[str, tuple[str, int, int]] = {} # Dict[asset_name, Tuple[amount, unit, multiplier]] + printable_dict: dict[str, tuple[str, int, int]] = {} # dict[asset_name, tuple[amount, unit, multiplier]] 
royalty_asset_dict: dict[Any, tuple[Any, uint16]] = {} fungible_asset_dict: dict[Any, uint64] = {} for item in [*offers, *requests]: diff --git a/chia/consensus/block_body_validation.py b/chia/consensus/block_body_validation.py index 017f916362c0..7bc5dbf41eaf 100644 --- a/chia/consensus/block_body_validation.py +++ b/chia/consensus/block_body_validation.py @@ -6,17 +6,16 @@ from dataclasses import dataclass, field from typing import Callable, Optional, Union -from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions +from chia_rs import AugSchemeMPL, BLSCache, G1Element, SpendBundleConditions, compute_merkle_set_root from chiabip158 import PyBIP158 from chia.consensus.block_record import BlockRecord from chia.consensus.block_rewards import calculate_base_farmer_reward, calculate_pool_reward -from chia.consensus.block_root_validation import validate_block_merkle_roots from chia.consensus.blockchain_interface import BlockRecordsProtocol from chia.consensus.coinbase import create_farmer_coin, create_pool_coin from chia.consensus.constants import ConsensusConstants from chia.full_node.mempool_check_conditions import mempool_check_time_locks -from chia.types.blockchain_format.coin import Coin +from chia.types.blockchain_format.coin import Coin, hash_coin_ids from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_record import CoinRecord from chia.types.full_block import FullBlock @@ -149,6 +148,38 @@ def rollback(self, header_hash: bytes32, height: int) -> None: self.removals_since_fork = {k: v for k, v in self.removals_since_fork.items() if v.height <= height} +def validate_block_merkle_roots( + block_additions_root: bytes32, + block_removals_root: bytes32, + tx_additions: list[tuple[Coin, bytes32]], + tx_removals: list[bytes32], +) -> Optional[Err]: + # Create addition Merkle set + puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {} + + for coin, coin_name in tx_additions: + if coin.puzzle_hash in 
puzzlehash_coins_map: + puzzlehash_coins_map[coin.puzzle_hash].append(coin_name) + else: + puzzlehash_coins_map[coin.puzzle_hash] = [coin_name] + + # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash + additions_merkle_items: list[bytes32] = [] + for puzzle, coin_ids in puzzlehash_coins_map.items(): + additions_merkle_items.append(puzzle) + additions_merkle_items.append(hash_coin_ids(coin_ids)) + + additions_root = bytes32(compute_merkle_set_root(additions_merkle_items)) + removals_root = bytes32(compute_merkle_set_root(tx_removals)) + + if block_additions_root != additions_root: + return Err.BAD_ADDITION_ROOT + if block_removals_root != removals_root: + return Err.BAD_REMOVAL_ROOT + + return None + + async def validate_block_body( constants: ConsensusConstants, records: BlockRecordsProtocol, @@ -312,7 +343,7 @@ async def validate_block_body( if block.transactions_generator is None: return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None - # The generator_refs_root must be the hash of the concatenation of the List[uint32] + # The generator_refs_root must be the hash of the concatenation of the list[uint32] generator_refs_hash = std_hash(b"".join([i.stream_to_bytes() for i in block.transactions_generator_ref_list])) if block.transactions_info.generator_refs_root != generator_refs_hash: return Err.INVALID_TRANSACTIONS_GENERATOR_REFS_ROOT, None diff --git a/chia/consensus/block_root_validation.py b/chia/consensus/block_root_validation.py deleted file mode 100644 index 215cf6e2e7a6..000000000000 --- a/chia/consensus/block_root_validation.py +++ /dev/null @@ -1,46 +0,0 @@ -from __future__ import annotations - -from typing import Optional - -from chia_rs import compute_merkle_set_root - -from chia.types.blockchain_format.coin import Coin, hash_coin_ids -from chia.types.blockchain_format.sized_bytes import bytes32 -from chia.util.errors import Err - - -def validate_block_merkle_roots( - block_additions_root: bytes32, - 
block_removals_root: bytes32, - tx_additions: Optional[list[tuple[Coin, bytes32]]] = None, - tx_removals: Optional[list[bytes32]] = None, -) -> Optional[Err]: - if tx_removals is None: - tx_removals = [] - if tx_additions is None: - tx_additions = [] - - # Create addition Merkle set - puzzlehash_coins_map: dict[bytes32, list[bytes32]] = {} - - for coin, coin_name in tx_additions: - if coin.puzzle_hash in puzzlehash_coins_map: - puzzlehash_coins_map[coin.puzzle_hash].append(coin_name) - else: - puzzlehash_coins_map[coin.puzzle_hash] = [coin_name] - - # Addition Merkle set contains puzzlehash and hash of all coins with that puzzlehash - additions_merkle_items: list[bytes32] = [] - for puzzle, coin_ids in puzzlehash_coins_map.items(): - additions_merkle_items.append(puzzle) - additions_merkle_items.append(hash_coin_ids(coin_ids)) - - additions_root = bytes32(compute_merkle_set_root(additions_merkle_items)) - removals_root = bytes32(compute_merkle_set_root(tx_removals)) - - if block_additions_root != additions_root: - return Err.BAD_ADDITION_ROOT - if block_removals_root != removals_root: - return Err.BAD_REMOVAL_ROOT - - return None diff --git a/chia/consensus/blockchain.py b/chia/consensus/blockchain.py index 9c3f31006b2d..97d65deaf05d 100644 --- a/chia/consensus/blockchain.py +++ b/chia/consensus/blockchain.py @@ -487,6 +487,10 @@ async def _reconsider_peak( if genesis: records_to_add = [block_record] + elif fork_info.block_hashes == [block_record.header_hash]: + # in the common case, we just add a block on top of the chain. Check + # for that here to avoid an unnecessary database lookup. 
+ records_to_add = [block_record] else: records_to_add = await self.block_store.get_block_records_by_hash(fork_info.block_hashes) diff --git a/chia/consensus/multiprocess_validation.py b/chia/consensus/multiprocess_validation.py index 2450135afb71..c4941d6f5836 100644 --- a/chia/consensus/multiprocess_validation.py +++ b/chia/consensus/multiprocess_validation.py @@ -5,7 +5,7 @@ import logging import time import traceback -from collections.abc import Sequence +from collections.abc import Awaitable, Sequence from concurrent.futures import Executor from dataclasses import dataclass from typing import Optional @@ -14,7 +14,7 @@ from chia.consensus.block_header_validation import validate_finished_header_block from chia.consensus.block_record import BlockRecord -from chia.consensus.blockchain_interface import BlockRecordsProtocol, BlocksProtocol +from chia.consensus.blockchain_interface import BlockRecordsProtocol from chia.consensus.constants import ConsensusConstants from chia.consensus.full_block_to_block_record import block_to_block_record from chia.consensus.get_block_challenge import get_block_challenge @@ -132,7 +132,7 @@ def pre_validate_block( async def pre_validate_blocks_multiprocessing( constants: ConsensusConstants, - block_records: BlocksProtocol, + blockchain: AugmentedBlockchain, blocks: Sequence[FullBlock], pool: Executor, block_height_conds_map: dict[uint32, SpendBundleConditions], @@ -140,32 +140,40 @@ async def pre_validate_blocks_multiprocessing( *, wp_summaries: Optional[list[SubEpochSummary]] = None, validate_signatures: bool = True, -) -> list[PreValidationResult]: +) -> Sequence[Awaitable[PreValidationResult]]: """ This method must be called under the blockchain lock - If all the full blocks pass pre-validation, (only validates header), returns the list of required iters. - if any validation issue occurs, returns False. + The blocks passed to this function are submitted to be validated in the + executor passed in as "pool". 
The futures for those jobs are then returned. + When awaited, the return value is the PreValidationResult for each block. + The PreValidationResult indicates whether the block was valid or not. Args: constants: - pool: - constants: - block_records: + blockchain: The blockchain object to validate these blocks with respect to. + It's an AugmentedBlockchain to allow for previous batches of blocks to + be included, even if they haven't been added to the underlying blockchain + database yet. The blocks passed in will be added/augmented onto this blockchain. + pool: The executor to submit the validation jobs to blocks: list of full blocks to validate (must be connected to current chain) - npc_results + vs: The ValidationState refers to the state for the first block in the batch. + This is an in-out parameter that will be updated to the validation state + for the next batch of blocks. It includes subslot iterators, difficulty and + the previous sub epoch summary (ses) block. + wp_summaries: + validate_signatures: """ prev_b: Optional[BlockRecord] = None + async def return_error(error_code: Err) -> PreValidationResult: + return PreValidationResult(uint16(error_code.value), None, None, False, uint32(0)) + if blocks[0].height > 0: - curr = block_records.try_block_record(blocks[0].prev_header_hash) + curr = blockchain.try_block_record(blocks[0].prev_header_hash) if curr is None: - return [PreValidationResult(uint16(Err.INVALID_PREV_BLOCK_HASH.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_PREV_BLOCK_HASH)] prev_b = curr - # the agumented blockchain object will let us add temporary block records - # they won't actually be added to the underlying blockchain object - blockchain = AugmentedBlockchain(block_records) - futures = [] # Pool of workers to validate blocks concurrently @@ -186,7 +194,7 @@ async def pre_validate_blocks_multiprocessing( block.reward_chain_block.proof_of_space, constants, challenge, cc_sp_hash, height=block.height ) if q_str is 
None: - return [PreValidationResult(uint16(Err.INVALID_POSPACE.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_POSPACE)] required_iters: uint64 = calculate_iterations_quality( constants.DIFFICULTY_CONSTANT_FACTOR, @@ -207,13 +215,13 @@ async def pre_validate_blocks_multiprocessing( ) except ValueError: log.exception("block_to_block_record()") - return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_SUB_EPOCH_SUMMARY)] if block_rec.sub_epoch_summary_included is not None and wp_summaries is not None: next_ses = wp_summaries[int(block.height / constants.SUB_EPOCH_BLOCKS) - 1] if not block_rec.sub_epoch_summary_included.get_hash() == next_ses.get_hash(): log.error("sub_epoch_summary does not match wp sub_epoch_summary list") - return [PreValidationResult(uint16(Err.INVALID_SUB_EPOCH_SUMMARY.value), None, None, False, uint32(0))] + return [return_error(Err.INVALID_SUB_EPOCH_SUMMARY)] blockchain.add_extra_block(block, block_rec) # Temporarily add block to chain prev_b = block_rec @@ -227,11 +235,7 @@ async def pre_validate_blocks_multiprocessing( if block_generator is not None: previous_generators = block_generator.generator_refs except ValueError: - return [ - PreValidationResult( - uint16(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING.value), None, None, False, uint32(0) - ) - ] + return [return_error(Err.FAILED_GETTING_GENERATOR_MULTIPROCESSING)] futures.append( asyncio.get_running_loop().run_in_executor( @@ -250,5 +254,4 @@ async def pre_validate_blocks_multiprocessing( if block_rec.sub_epoch_summary_included is not None: vs.prev_ses_block = block_rec - # Collect all results into one flat list - return list(await asyncio.gather(*futures)) + return futures diff --git a/chia/daemon/server.py b/chia/daemon/server.py index 89a4dc918399..ea9c64948273 100644 --- a/chia/daemon/server.py +++ b/chia/daemon/server.py @@ -45,6 +45,7 @@ from chia.util.lock import Lockfile, 
LockfileError from chia.util.log_exceptions import log_exceptions from chia.util.network import WebServer +from chia.util.safe_cancel_task import cancel_task_safe from chia.util.service_groups import validate_service from chia.util.setproctitle import setproctitle from chia.util.ws_message import WsRpcMessage, create_payload, format_response @@ -241,19 +242,12 @@ async def _accept_signal( self.log.info("Received signal %s (%s), shutting down.", signal_.name, signal_.value) await self.stop() - def cancel_task_safe(self, task: Optional[asyncio.Task]): - if task is not None: - try: - task.cancel() - except Exception as e: - self.log.error(f"Error while canceling task.{e} {task}") - async def stop_command(self, websocket: WebSocketResponse, request: dict[str, Any] = {}) -> dict[str, Any]: return await self.stop() async def stop(self) -> dict[str, Any]: - self.cancel_task_safe(self.ping_job) - self.cancel_task_safe(self.state_changed_task) + cancel_task_safe(self.ping_job, self.log) + cancel_task_safe(self.state_changed_task, self.log) service_names = list(self.services.keys()) stop_service_jobs = [ asyncio.create_task(kill_service(self.root_path, self.services, s_n)) for s_n in service_names diff --git a/chia/daemon/windows_signal.py b/chia/daemon/windows_signal.py index 67f8789e6a57..74424a0723b7 100644 --- a/chia/daemon/windows_signal.py +++ b/chia/daemon/windows_signal.py @@ -21,8 +21,8 @@ import threading sigmap = { - signal.SIGINT: signal.CTRL_C_EVENT, # pylint: disable=E1101 - signal.SIGBREAK: signal.CTRL_BREAK_EVENT, # pylint: disable=E1101 + signal.SIGINT: signal.CTRL_C_EVENT, + signal.SIGBREAK: signal.CTRL_BREAK_EVENT, } def kill(pid: int, signum: signal.Signals) -> None: diff --git a/chia/data_layer/data_store.py b/chia/data_layer/data_store.py index ead6f196cfd2..2e7688097893 100644 --- a/chia/data_layer/data_store.py +++ b/chia/data_layer/data_store.py @@ -496,8 +496,7 @@ async def change_root_status(self, root: Root, status: Status = Status.PENDING) async 
def check(self) -> None: for check in self._checks: - # pylint seems to think these are bound methods not unbound methods. - await check(self) # pylint: disable=too-many-function-args + await check(self) async def _check_roots_are_incrementing(self) -> None: async with self.db_wrapper.reader() as reader: diff --git a/chia/data_layer/util/plugin.py b/chia/data_layer/util/plugin.py index 7f5aa8e7e568..12d1f8bb27ba 100644 --- a/chia/data_layer/util/plugin.py +++ b/chia/data_layer/util/plugin.py @@ -20,7 +20,7 @@ async def load_plugin_configurations(root_path: Path, config_type: str, log: log config_type (str): The type of plugins to load ('downloaders' or 'uploaders'). Returns: - List[PluginRemote]: A list of valid PluginRemote instances for the specified plugin type. + list[PluginRemote]: A list of valid PluginRemote instances for the specified plugin type. """ config_path = root_path / "plugins" / config_type config_path.mkdir(parents=True, exist_ok=True) # Ensure the config directory exists diff --git a/chia/full_node/full_node.py b/chia/full_node/full_node.py index 0de2ae677de0..10d58f82f89b 100644 --- a/chia/full_node/full_node.py +++ b/chia/full_node/full_node.py @@ -10,7 +10,7 @@ import sqlite3 import time import traceback -from collections.abc import AsyncIterator, Awaitable +from collections.abc import AsyncIterator, Awaitable, Sequence from multiprocessing.context import BaseContext from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final @@ -47,7 +47,6 @@ from chia.full_node.subscriptions import PeerSubscriptions, peers_for_spend_bundle from chia.full_node.sync_store import Peak, SyncStore from chia.full_node.tx_processing_queue import TransactionQueue -from chia.full_node.util.safe_cancel_task import cancel_task_safe from chia.full_node.weight_proof import WeightProofHandler from chia.protocols import farmer_protocol, full_node_protocol, timelord_protocol, wallet_protocol from 
chia.protocols.farmer_protocol import SignagePointSourceData, SPSubSlotSourceData, SPVDFSourceData @@ -77,6 +76,7 @@ from chia.types.unfinished_block import UnfinishedBlock from chia.types.validation_state import ValidationState from chia.types.weight_proof import WeightProof +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.bech32m import encode_puzzle_hash from chia.util.check_fork_next_block import check_fork_next_block from chia.util.config import process_config_start_method @@ -86,9 +86,9 @@ from chia.util.errors import ConsensusError, Err, TimestampError, ValidationError from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.limited_semaphore import LimitedSemaphore -from chia.util.log_exceptions import log_exceptions from chia.util.path import path_from_root from chia.util.profiler import enable_profiler, mem_profile_task, profile_task +from chia.util.safe_cancel_task import cancel_task_safe # This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2 @@ -142,7 +142,7 @@ class FullNode: subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions) _transaction_queue_task: Optional[asyncio.Task[None]] = None simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None - _sync_task: Optional[asyncio.Task[None]] = None + _sync_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list) _transaction_queue: Optional[TransactionQueue] = None _compact_vdf_sem: Optional[LimitedSemaphore] = None _new_peak_sem: Optional[LimitedSemaphore] = None @@ -358,15 +358,21 @@ async def manage(self) -> AsyncIterator[None]: if self._transaction_queue_task is not None: self._transaction_queue_task.cancel() cancel_task_safe(task=self.wallet_sync_task, log=self.log) - cancel_task_safe(task=self._sync_task, log=self.log) + for one_sync_task in self._sync_task_list: + if not one_sync_task.done(): + cancel_task_safe(task=one_sync_task, 
log=self.log) for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()): cancel_task_safe(task, self.log) if self._init_weight_proof is not None: await asyncio.wait([self._init_weight_proof]) - if self._sync_task is not None: - with contextlib.suppress(asyncio.CancelledError): - await self._sync_task + for one_sync_task in self._sync_task_list: + if one_sync_task.done(): + self.log.info(f"Long sync task {one_sync_task.get_name()} done") + else: + with contextlib.suppress(asyncio.CancelledError): + self.log.info(f"Awaiting long sync task {one_sync_task.get_name()}") + await one_sync_task @property def block_store(self) -> BlockStore: @@ -604,7 +610,7 @@ async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, t ) vs = ValidationState(ssi, diff, None) success, state_change_summary, err = await self.add_block_batch( - response.blocks, peer_info, fork_info, vs + AugmentedBlockchain(self.blockchain), response.blocks, peer_info, fork_info, vs ) if not success: raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}") @@ -770,9 +776,16 @@ async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnec if await self.short_sync_batch(peer, uint32(max(curr_peak_height - 6, 0)), request.height): return None + # Clean up task reference list (used to prevent gc from killing running tasks) + for oldtask in self._sync_task_list[:]: + if oldtask.done(): + self._sync_task_list.remove(oldtask) + # This is the either the case where we were not able to sync successfully (for example, due to the fork # point being in the past), or we are very far behind. Performs a long sync. - self._sync_task = asyncio.create_task(self._sync()) + # Multiple tasks may be created here. If we don't save all handles, a task could enter a sync object + # and be cleaned up by the GC, corrupting the sync object and possibly not allowing anything else in. 
+ self._sync_task_list.append(asyncio.create_task(self._sync())) async def send_peak_to_timelords( self, peak_block: Optional[FullBlock] = None, peer: Optional[WSChiaConnection] = None @@ -1056,7 +1069,6 @@ async def sync_from_fork_point( peak_hash: bytes32, summaries: list[SubEpochSummary], ) -> None: - buffer_size = 4 self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}") peers_with_peak: list[WSChiaConnection] = self.get_peers_with_peak(peak_hash) fork_point_height = await check_fork_next_block( @@ -1081,7 +1093,17 @@ async def sync_from_fork_point( # normally "fork_point" or "fork_height" refers to the first common # block between the main chain and the fork. Here "fork_point_height" # seems to refer to the first diverging block - fork_info: Optional[ForkInfo] = None + # in case we're validating a reorg fork (i.e. not extending the + # main chain), we need to record the coin set from that fork in + # fork_info. Otherwise validation is very expensive, especially + # for deep reorgs + if fork_point_height > 0: + fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) + assert fork_hash is not None + else: + fork_hash = self.constants.GENESIS_CHALLENGE + fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) + if fork_point_height == 0: ssi = self.constants.SUB_SLOT_ITERS_STARTING diff = self.constants.DIFFICULTY_STARTING @@ -1092,11 +1114,16 @@ async def sync_from_fork_point( prev_b = await self.blockchain.get_full_block(prev_b_hash) assert prev_b is not None ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None) - vs = ValidationState(ssi, diff, prev_ses_block) - async def fetch_block_batches( - batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] - ) -> None: + # we need an augmented blockchain to validate blocks in batches. 
The + # batch must be treated as if it's part of the chain to validate the + # blocks in it. We also need them to keep appearing as if they're part + # of the chain when pipelining the validation of blocks. We start + # validating the next batch while still adding the first batch to the + # chain. + blockchain = AugmentedBlockchain(self.blockchain) + + async def fetch_blocks(output_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]]) -> None: start_height, end_height = 0, 0 new_peers_with_peak: list[WSChiaConnection] = peers_with_peak[:] try: @@ -1110,11 +1137,22 @@ async def fetch_block_batches( for peer in random.sample(new_peers_with_peak, len(new_peers_with_peak)): if peer.closed: continue + start = time.monotonic() response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=30) + end = time.monotonic() + if end - start > 5: + self.log.info(f"sync pipeline, peer took {end-start:0.2f} to respond to request_blocks") if response is None: await peer.close() elif isinstance(response, RespondBlocks): - await batch_queue.put((peer, response.blocks)) + start = time.monotonic() + await output_queue.put((peer, response.blocks)) + end = time.monotonic() + if end - start > 1: + self.log.info( + f"sync pipeline back-pressure. 
stalled {end-start:0.2f} " + "seconds on prevalidate block" + ) fetched = True break if fetched is False: @@ -1127,52 +1165,102 @@ async def fetch_block_batches( self.log.error(f"Exception fetching {start_height} to {end_height} from peer {e}") finally: # finished signal with None - await batch_queue.put(None) + await output_queue.put(None) + + async def validate_blocks( + input_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]], + output_queue: asyncio.Queue[ + Optional[ + tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]] + ] + ], + ) -> None: + nonlocal blockchain + nonlocal fork_info + first_batch = True + + vs = ValidationState(ssi, diff, prev_ses_block) - async def validate_block_batches( - inner_batch_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] + try: + while True: + res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await input_queue.get() + if res is None: + self.log.debug("done fetching blocks") + return None + peer, blocks = res + + # skip_blocks is only relevant at the start of the sync, + # to skip blocks we already have in the database (and have + # been validated). Once we start validating blocks, we + # shouldn't be skipping any. + blocks_to_validate = await self.skip_blocks(blockchain, blocks, fork_info, vs) + assert first_batch or len(blocks_to_validate) == len(blocks) + next_validation_state = copy.copy(vs) + + if len(blocks_to_validate) == 0: + continue + + first_batch = False + + futures: list[Awaitable[PreValidationResult]] = [] + for block in blocks_to_validate: + futures.extend( + await self.prevalidate_blocks( + blockchain, + [block], + peer.peer_info, + vs, + summaries, + ) + ) + start = time.monotonic() + await output_queue.put((peer, next_validation_state, list(futures), blocks_to_validate)) + end = time.monotonic() + if end - start > 1: + self.log.info(f"sync pipeline back-pressure. 
stalled {end-start:0.2f} seconds on add_block()") + except Exception: + self.log.exception("Exception validating") + finally: + # finished signal with None + await output_queue.put(None) + + async def ingest_blocks( + input_queue: asyncio.Queue[ + Optional[ + tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]] + ] + ], ) -> None: nonlocal fork_info block_rate = 0 block_rate_time = time.monotonic() block_rate_height = -1 while True: - res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await inner_batch_queue.get() + res = await input_queue.get() if res is None: - self.log.debug("done fetching blocks") + self.log.debug("done validating blocks") return None - peer, blocks = res + peer, vs, futures, blocks = res start_height = blocks[0].height end_height = blocks[-1].height if block_rate_height == -1: block_rate_height = start_height - # in case we're validating a reorg fork (i.e. not extending the - # main chain), we need to record the coin set from that fork in - # fork_info. Otherwise validation is very expensive, especially - # for deep reorgs - peak: Optional[BlockRecord] - if fork_info is None: - if fork_point_height > 0: - fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1)) - assert fork_hash is not None - else: - fork_hash = self.constants.GENESIS_CHALLENGE - fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash) - + pre_validation_results = list(await asyncio.gather(*futures)) # The ValidationState object (vs) is an in-out parameter. 
the add_block_batch() # call will update it - success, state_change_summary, err = await self.add_block_batch( + state_change_summary, err = await self.add_prevalidated_blocks( + blockchain, blocks, - peer.get_peer_logging(), + pre_validation_results, fork_info, + peer.peer_info, vs, - summaries, ) - if success is False: + if err is not None: await peer.close(600) - raise ValueError(f"Failed to validate block batch {start_height} to {end_height}") + raise ValueError(f"Failed to validate block batch {start_height} to {end_height}: {err}") if end_height - block_rate_height > 100: now = time.monotonic() block_rate = int((end_height - block_rate_height) // (now - block_rate_time)) @@ -1180,7 +1268,7 @@ async def validate_block_batches( block_rate_height = end_height self.log.info(f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s)") - peak = self.blockchain.get_peak() + peak: Optional[BlockRecord] = self.blockchain.get_peak() if state_change_summary is not None: assert peak is not None # Hints must be added to the DB. The other post-processing tasks are not required when syncing @@ -1197,17 +1285,32 @@ async def validate_block_batches( # height, in that case. 
self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE) - batch_queue_input: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue( - maxsize=buffer_size - ) - fetch_task = asyncio.Task(fetch_block_batches(batch_queue_input)) - validate_task = asyncio.Task(validate_block_batches(batch_queue_input)) + block_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue(maxsize=10) + validation_queue: asyncio.Queue[ + Optional[tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]] + ] = asyncio.Queue(maxsize=10) + + fetch_task = asyncio.create_task(fetch_blocks(block_queue)) + validate_task = asyncio.create_task(validate_blocks(block_queue, validation_queue)) + ingest_task = asyncio.create_task(ingest_blocks(validation_queue)) try: - with log_exceptions(log=self.log, message="sync from fork point failed"): - await asyncio.gather(fetch_task, validate_task) + await asyncio.gather(fetch_task, validate_task, ingest_task) except Exception: - assert validate_task.done() - fetch_task.cancel() # no need to cancel validate_task, if we end up here validate_task is already done + self.log.exception("sync from fork point failed") + finally: + cancel_task_safe(validate_task, self.log) + cancel_task_safe(fetch_task) + cancel_task_safe(ingest_task) + + # we still need to await all the pending futures of the + # prevalidation steps posted to the thread pool + while not validation_queue.empty(): + result = validation_queue.get_nowait() + if result is None: + continue + + _, _, futures, _ = result + await asyncio.gather(*futures) def get_peers_with_peak(self, peak_hash: bytes32) -> list[WSChiaConnection]: peer_ids: set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash]) @@ -1265,6 +1368,7 @@ async def update_wallets(self, wallet_update: WalletUpdate) -> None: async def add_block_batch( self, + blockchain: AugmentedBlockchain, all_blocks: 
list[FullBlock], peer_info: PeerInfo, fork_info: ForkInfo, @@ -1274,15 +1378,56 @@ async def add_block_batch( # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced + pre_validate_start = time.monotonic() + blockchain = AugmentedBlockchain(self.blockchain) + blocks_to_validate = await self.skip_blocks(blockchain, all_blocks, fork_info, vs) + + if len(blocks_to_validate) == 0: + return True, None, None + + futures = await self.prevalidate_blocks( + blockchain, + blocks_to_validate, + peer_info, + copy.copy(vs), + wp_summaries, + ) + pre_validation_results = list(await asyncio.gather(*futures)) + + agg_state_change_summary, err = await self.add_prevalidated_blocks( + blockchain, + blocks_to_validate, + pre_validation_results, + fork_info, + peer_info, + vs, + ) + + if agg_state_change_summary is not None: + self._state_changed("new_peak") + self.log.debug( + f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, " + f"advanced: True" + ) + return err is None, agg_state_change_summary, err + + async def skip_blocks( + self, + blockchain: AugmentedBlockchain, + all_blocks: list[FullBlock], + fork_info: ForkInfo, + vs: ValidationState, # in-out parameter + ) -> list[FullBlock]: + blocks_to_validate: list[FullBlock] = [] for i, block in enumerate(all_blocks): header_hash = block.header_hash - block_rec = await self.blockchain.get_block_record_from_db(header_hash) + block_rec = await blockchain.get_block_record_from_db(header_hash) if block_rec is None: blocks_to_validate = all_blocks[i:] break else: - self.blockchain.add_block_record(block_rec) + blockchain.add_block_record(block_rec) if block_rec.sub_epoch_summary_included: # already validated block, update sub slot iters, difficulty and prev sub epoch summary vs.prev_ses_block = block_rec @@ -1298,7 +1443,7 @@ async def add_block_batch( # we have already 
validated this block once, no need to do it again. # however, if this block is not part of the main chain, we need to # update the fork context with its additions and removals - if self.blockchain.height_to_hash(block.height) == header_hash: + if blockchain.height_to_hash(block.height) == header_hash: # we're on the main chain, just fast-forward the fork height fork_info.reset(block.height, header_hash) else: @@ -1307,50 +1452,57 @@ async def add_block_batch( # removals in fork_info. await self.blockchain.advance_fork_info(block, fork_info) await self.blockchain.run_single_block(block, fork_info) + return blocks_to_validate - if len(blocks_to_validate) == 0: - return True, None, None + async def prevalidate_blocks( + self, + blockchain: AugmentedBlockchain, + blocks_to_validate: list[FullBlock], + peer_info: PeerInfo, + vs: ValidationState, + wp_summaries: Optional[list[SubEpochSummary]] = None, + ) -> Sequence[Awaitable[PreValidationResult]]: + """ + This is a thin wrapper over pre_validate_blocks_multiprocessing(). + Args: + blockchain: + blocks_to_validate: + peer_info: + vs: The ValidationState for the first block in the batch. This is an in-out + parameter. It will be updated to be the validation state for the next + batch of blocks. + wp_summaries: + """ # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions # for these blocks (unlike during normal operation where we validate one at a time) # We have to copy the ValidationState object to preserve it for the add_block() # call below. pre_validate_blocks_multiprocessing() will update the # object we pass in. 
- pre_validate_start = time.monotonic() - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( - self.blockchain.constants, - self.blockchain, + return await pre_validate_blocks_multiprocessing( + self.constants, + blockchain, blocks_to_validate, self.blockchain.pool, {}, - copy.copy(vs), + vs, wp_summaries=wp_summaries, validate_signatures=True, ) - pre_validate_end = time.monotonic() - pre_validate_time = pre_validate_end - pre_validate_start - - self.log.log( - logging.WARNING if pre_validate_time > 10 else logging.DEBUG, - f"Block pre-validation: {pre_validate_end - pre_validate_start:0.2f}s " - f"CLVM: {sum(pvr.timing/1000.0 for pvr in pre_validation_results):0.2f}s " - f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})", - ) - - for i, block in enumerate(blocks_to_validate): - if pre_validation_results[i].error is not None: - self.log.error( - f"Invalid block from peer: {peer_info} height {block.height} {Err(pre_validation_results[i].error)}" - ) - return ( - False, - None, - Err(pre_validation_results[i].error), - ) + async def add_prevalidated_blocks( + self, + blockchain: AugmentedBlockchain, + blocks_to_validate: list[FullBlock], + pre_validation_results: list[PreValidationResult], + fork_info: ForkInfo, + peer_info: PeerInfo, + vs: ValidationState, # in-out parameter + ) -> tuple[Optional[StateChangeSummary], Optional[Err]]: agg_state_change_summary: Optional[StateChangeSummary] = None block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash) for i, block in enumerate(blocks_to_validate): + header_hash = block.header_hash assert vs.prev_ses_block is None or vs.prev_ses_block.height < block.height assert pre_validation_results[i].required_iters is not None state_change_summary: Optional[StateChangeSummary] @@ -1373,12 +1525,13 @@ async def add_block_batch( result, error, state_change_summary = await self.blockchain.add_block( block, 
pre_validation_results[i], None, vs.current_ssi, fork_info, prev_ses_block=vs.prev_ses_block ) + if error is None: + blockchain.remove_extra_block(header_hash) if result == AddBlockResult.NEW_PEAK: # since this block just added a new peak, we've don't need any # fork history from fork_info anymore - if fork_info is not None: - fork_info.reset(block.height, block.header_hash) + fork_info.reset(block.height, header_hash) assert state_change_summary is not None # Since all blocks are contiguous, we can simply append the rollback changes and npc results if agg_state_change_summary is None: @@ -1397,8 +1550,8 @@ async def add_block_batch( elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK: if error is not None: self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ") - return False, agg_state_change_summary, error - block_record = self.blockchain.block_record(block.header_hash) + return agg_state_change_summary, error + block_record = blockchain.block_record(header_hash) assert block_record is not None if block_record.sub_epoch_summary_included is not None: vs.prev_ses_block = block_record @@ -1406,11 +1559,7 @@ async def add_block_batch( await self.weight_proof_handler.create_prev_sub_epoch_segments() if agg_state_change_summary is not None: self._state_changed("new_peak") - self.log.debug( - f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, " - f"advanced: True" - ) - return True, agg_state_change_summary, None + return agg_state_change_summary, None async def get_sub_slot_iters_difficulty_ses_block( self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64] @@ -1843,15 +1992,16 @@ async def add_block( prev_ses_block = curr new_slot = len(block.finished_sub_slots) > 0 ssi, diff = get_next_sub_slot_iters_and_difficulty(self.constants, new_slot, prev_b, self.blockchain) - pre_validation_results = await pre_validate_blocks_multiprocessing( + futures = await 
pre_validate_blocks_multiprocessing( self.blockchain.constants, - self.blockchain, + AugmentedBlockchain(self.blockchain), [block], self.blockchain.pool, block_height_conds_map, ValidationState(ssi, diff, prev_ses_block), validate_signatures=False, ) + pre_validation_results = list(await asyncio.gather(*futures)) added: Optional[AddBlockResult] = None pre_validation_time = time.monotonic() - validation_start try: @@ -2501,7 +2651,7 @@ async def add_transaction( await self.broadcast_added_tx(mempool_item, current_peer=peer) if self.simulator_transaction_callback is not None: # callback - await self.simulator_transaction_callback(spend_name) # pylint: disable=E1102 + await self.simulator_transaction_callback(spend_name) else: self.mempool_manager.remove_seen(spend_name) diff --git a/chia/full_node/full_node_store.py b/chia/full_node/full_node_store.py index c25aa5f26e24..3f77f5f4f7a1 100644 --- a/chia/full_node/full_node_store.py +++ b/chia/full_node/full_node_store.py @@ -92,7 +92,7 @@ class FullNodeStore: candidate_backup_blocks: dict[bytes32, tuple[uint32, UnfinishedBlock]] # Block hashes of unfinished blocks that we have seen recently. 
This is - # effectively a Set[bytes32] but in order to evict the oldest items first, + # effectively a set[bytes32] but in order to evict the oldest items first, # we use a Dict that preserves insertion order, and remove from the # beginning seen_unfinished_blocks: dict[bytes32, None] @@ -135,7 +135,7 @@ class FullNodeStore: recent_eos: LRUCache[bytes32, tuple[EndOfSubSlotBundle, float]] pending_tx_request: dict[bytes32, bytes32] # tx_id: peer_id - peers_with_tx: dict[bytes32, set[bytes32]] # tx_id: Set[peer_ids} + peers_with_tx: dict[bytes32, set[bytes32]] # tx_id: set[peer_ids] tx_fetch_tasks: dict[bytes32, asyncio.Task[None]] # Task id: task serialized_wp_message: Optional[Message] serialized_wp_message_tip: Optional[bytes32] diff --git a/chia/full_node/mempool_manager.py b/chia/full_node/mempool_manager.py index 093eb4108a5b..ad73c40b8582 100644 --- a/chia/full_node/mempool_manager.py +++ b/chia/full_node/mempool_manager.py @@ -338,7 +338,7 @@ async def add_spend_bundle( Returns: Optional[uint64]: cost of the entire transaction, None iff status is FAILED MempoolInclusionStatus: SUCCESS (should add to pool), FAILED (cannot add), and PENDING (can add later) - List[MempoolRemoveInfo]: conflicting mempool items which were removed, if no Err + list[MempoolRemoveInfo]: conflicting mempool items which were removed, if no Err Optional[Err]: Err is set iff status is FAILED """ @@ -401,7 +401,7 @@ async def validate_spend_bundle( Returns: Optional[Err]: Err is set if we cannot add to the mempool, None if we will immediately add to mempool Optional[MempoolItem]: the item to add (to mempool or pending pool) - List[bytes32]: conflicting mempool items to remove, if no Err + list[bytes32]: conflicting mempool items to remove, if no Err """ start_time = time.time() if self.peak is None: diff --git a/chia/full_node/weight_proof.py b/chia/full_node/weight_proof.py index f5f2c29dd713..1f03af771efb 100644 --- a/chia/full_node/weight_proof.py +++ b/chia/full_node/weight_proof.py @@
-679,7 +679,7 @@ def _sample_sub_epoch( weight_to_check: Optional[list[uint128]], ) -> bool: """ - weight_to_check: List[uint128] is expected to be sorted + weight_to_check: list[uint128] is expected to be sorted """ if weight_to_check is None: return True diff --git a/chia/protocols/wallet_protocol.py b/chia/protocols/wallet_protocol.py index d6a55d61473a..bdb06523e81a 100644 --- a/chia/protocols/wallet_protocol.py +++ b/chia/protocols/wallet_protocol.py @@ -208,9 +208,9 @@ class RegisterForPhUpdates(Streamable): # @streamable # @dataclass(frozen=True) # class RespondToPhUpdates(Streamable): -# puzzle_hashes: List[bytes32] +# puzzle_hashes: list[bytes32] # min_height: uint32 -# coin_states: List[CoinState] +# coin_states: list[CoinState] @streamable @@ -267,7 +267,7 @@ class RespondSESInfo(Streamable): @dataclass(frozen=True) class RequestFeeEstimates(Streamable): """ - time_targets (List[uint64]): Epoch timestamps in seconds to estimate FeeRates for. + time_targets (list[uint64]): Epoch timestamps in seconds to estimate FeeRates for. """ time_targets: list[uint64] diff --git a/chia/rpc/data_layer_rpc_api.py b/chia/rpc/data_layer_rpc_api.py index f7f7ca308e32..dbd93a18916b 100644 --- a/chia/rpc/data_layer_rpc_api.py +++ b/chia/rpc/data_layer_rpc_api.py @@ -46,7 +46,7 @@ def process_change(change: dict[str, Any]) -> dict[str, Any]: # TODO: A full class would likely be nice for this so downstream doesn't - # have to deal with maybe-present attributes or Dict[str, Any] hints. + # have to deal with maybe-present attributes or dict[str, Any] hints. 
reference_node_hash = change.get("reference_node_hash") if reference_node_hash is not None: reference_node_hash = bytes32.from_hexstr(reference_node_hash) diff --git a/chia/rpc/data_layer_rpc_util.py b/chia/rpc/data_layer_rpc_util.py index 315152222f40..33b8ca4846cb 100644 --- a/chia/rpc/data_layer_rpc_util.py +++ b/chia/rpc/data_layer_rpc_util.py @@ -26,9 +26,7 @@ async def __call__(self, request: dict[str, Any]) -> dict[str, Any]: class UnboundMarshalledRoute(Protocol): # Ignoring pylint complaint about the name of the first argument since this is a # special case. - async def __call__( # pylint: disable=E0213 - protocol_self, self: Any, request: MarshallableProtocol - ) -> MarshallableProtocol: + async def __call__(protocol_self, self: Any, request: MarshallableProtocol) -> MarshallableProtocol: pass diff --git a/chia/rpc/farmer_rpc_api.py b/chia/rpc/farmer_rpc_api.py index 8415a119781b..3e608080b89b 100644 --- a/chia/rpc/farmer_rpc_api.py +++ b/chia/rpc/farmer_rpc_api.py @@ -316,7 +316,7 @@ async def get_harvesters_summary(self, _: dict[str, object]) -> EndpointResult: return await self.service.get_harvesters(True) async def get_harvester_plots_valid(self, request_dict: dict[str, object]) -> EndpointResult: - # TODO: Consider having a extra List[PlotInfo] in Receiver to avoid rebuilding the list for each call + # TODO: Consider having an extra list[PlotInfo] in Receiver to avoid rebuilding the list for each call request = PlotInfoRequestData.from_json_dict(request_dict) plot_list = list(self.service.get_receiver(request.node_id).plots().values()) # Apply filter diff --git a/chia/rpc/wallet_request_types.py b/chia/rpc/wallet_request_types.py index 387475823741..1b091bd4effe 100644 --- a/chia/rpc/wallet_request_types.py +++ b/chia/rpc/wallet_request_types.py @@ -1,5 +1,3 @@ -# pylint: disable=invalid-field-call - from __future__ import annotations import sys diff --git a/chia/rpc/wallet_rpc_api.py b/chia/rpc/wallet_rpc_api.py index 56f32200f0c1..caea37779563
100644 --- a/chia/rpc/wallet_rpc_api.py +++ b/chia/rpc/wallet_rpc_api.py @@ -1576,7 +1576,7 @@ async def get_spendable_coins(self, request: dict[str, Any]) -> EndpointResult: excluded_coin_amounts = [] excluded_coins_input: Optional[dict[str, dict[str, Any]]] = request.get("excluded_coins") if excluded_coins_input is not None: - excluded_coins = [Coin.from_json_dict(json_coin) for json_coin in excluded_coins_input] + excluded_coins = [Coin.from_json_dict(json_coin) for json_coin in excluded_coins_input.values()] else: excluded_coins = [] excluded_coin_ids_input: Optional[list[str]] = request.get("excluded_coin_ids") @@ -3186,7 +3186,7 @@ async def dao_create_proposal( return { "success": True, # Semantics guarantee proposal_id here - "proposal_id": proposal_id, # pylint: disable=possibly-used-before-assignment + "proposal_id": proposal_id, "tx_id": None, # tx_endpoint wrapper will take care of this "tx": None, # tx_endpoint wrapper will take care of this "transactions": None, # tx_endpoint wrapper will take care of this diff --git a/chia/server/chia_policy.py b/chia/server/chia_policy.py index 25a1a8dfaa14..6073f69c173e 100644 --- a/chia/server/chia_policy.py +++ b/chia/server/chia_policy.py @@ -261,7 +261,7 @@ async def _chia_accept_loop(self, listener: socket.socket) -> tuple[socket.socke try: return await self._chia_accept(listener) except OSError as exc: - if exc.winerror not in ( # pylint: disable=E1101 + if exc.winerror not in ( _winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED, ): @@ -269,7 +269,7 @@ async def _chia_accept_loop(self, listener: socket.socket) -> tuple[socket.socke def _chia_accept(self, listener: socket.socket) -> asyncio.Future[tuple[socket.socket, tuple[object, ...]]]: self._register_with_iocp(listener) - conn = self._get_accept_socket(listener.family) # pylint: disable=assignment-from-no-return + conn = self._get_accept_socket(listener.family) ov = _overlapped.Overlapped(_winapi.NULL) ov.AcceptEx(listener.fileno(), 
conn.fileno()) @@ -292,13 +292,13 @@ async def accept_coro(self: ChiaProactor, future: asyncio.Future[object], conn: raise except OSError as exc: # https://github.com/python/cpython/issues/93821#issuecomment-1157945855 - if exc.winerror not in ( # pylint: disable=E1101 + if exc.winerror not in ( _winapi.ERROR_NETNAME_DELETED, _winapi.ERROR_OPERATION_ABORTED, ): raise - future = self._register(ov, listener, finish_accept) # pylint: disable=assignment-from-no-return + future = self._register(ov, listener, finish_accept) coro = accept_coro(self, future, conn) asyncio.ensure_future(coro, loop=self._loop) return future diff --git a/chia/server/node_discovery.py b/chia/server/node_discovery.py index 31f2d204386f..57afa4f204ae 100644 --- a/chia/server/node_discovery.py +++ b/chia/server/node_discovery.py @@ -26,6 +26,7 @@ from chia.util.ints import uint16, uint64 from chia.util.ip_address import IPAddress from chia.util.network import resolve +from chia.util.safe_cancel_task import cancel_task_safe MAX_PEERS_RECEIVED_PER_REQUEST = 1000 MAX_TOTAL_PEERS_RECEIVED = 3000 @@ -103,21 +104,14 @@ async def start_tasks(self) -> None: async def _close_common(self) -> None: self.is_closed = True - self.cancel_task_safe(self.connect_peers_task) - self.cancel_task_safe(self.serialize_task) - self.cancel_task_safe(self.cleanup_task) + cancel_task_safe(self.connect_peers_task, self.log) + cancel_task_safe(self.serialize_task, self.log) + cancel_task_safe(self.cleanup_task, self.log) for t in self.pending_tasks: - self.cancel_task_safe(t) + cancel_task_safe(t, self.log) if len(self.pending_tasks) > 0: await asyncio.wait(self.pending_tasks) - def cancel_task_safe(self, task: Optional[asyncio.Task[None]]) -> None: - if task is not None: - try: - task.cancel() - except Exception as e: - self.log.error(f"Error while canceling task.{e} {task}") - async def on_connect(self, peer: WSChiaConnection) -> None: if ( peer.is_outbound is False @@ -534,8 +528,8 @@ async def start(self) -> None: async 
def close(self) -> None: await self._close_common() - self.cancel_task_safe(self.self_advertise_task) - self.cancel_task_safe(self.address_relay_task) + cancel_task_safe(self.self_advertise_task, self.log) + cancel_task_safe(self.address_relay_task, self.log) async def _periodically_self_advertise_and_clean_data(self) -> None: while not self.is_closed: diff --git a/chia/simulator/add_blocks_in_batches.py b/chia/simulator/add_blocks_in_batches.py index 965e40f28186..7919fbb59098 100644 --- a/chia/simulator/add_blocks_in_batches.py +++ b/chia/simulator/add_blocks_in_batches.py @@ -9,6 +9,7 @@ from chia.types.full_block import FullBlock from chia.types.peer_info import PeerInfo from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.batches import to_batches @@ -39,6 +40,7 @@ async def add_blocks_in_batches( print(f"main chain: {b.height:4} weight: {b.weight}") # vs is updated by the call to add_block_batch() success, state_change_summary, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch.entries, PeerInfo("0.0.0.0", 0), fork_info, diff --git a/chia/simulator/full_node_simulator.py b/chia/simulator/full_node_simulator.py index ef42d7b0a15c..6d1495906cef 100644 --- a/chia/simulator/full_node_simulator.py +++ b/chia/simulator/full_node_simulator.py @@ -26,6 +26,7 @@ from chia.types.full_block import FullBlock from chia.types.spend_bundle import SpendBundle from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import lock_and_load_config, save_config from chia.util.ints import uint8, uint32, uint64, uint128 from chia.util.timing import adjusted_timeout, backoff_times @@ -174,15 +175,16 @@ async def farm_new_transaction_block( current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - 
pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, - self.full_node.blockchain, + AugmentedBlockchain(self.full_node.blockchain), [genesis], self.full_node.blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( @@ -237,15 +239,16 @@ async def farm_new_block(self, request: FarmNewBlockProtocol, force_wait_for_tim current_blocks = await self.get_all_full_blocks() if len(current_blocks) == 0: genesis = self.bt.get_consecutive_blocks(uint8(1))[0] - pre_validation_results: list[PreValidationResult] = await pre_validate_blocks_multiprocessing( + futures = await pre_validate_blocks_multiprocessing( self.full_node.blockchain.constants, - self.full_node.blockchain, + AugmentedBlockchain(self.full_node.blockchain), [genesis], self.full_node.blockchain.pool, {}, ValidationState(ssi, diff, None), validate_signatures=True, ) + pre_validation_results: list[PreValidationResult] = list(await asyncio.gather(*futures)) assert pre_validation_results is not None fork_info = ForkInfo(-1, -1, self.full_node.constants.GENESIS_CHALLENGE) await self.full_node.blockchain.add_block( diff --git a/chia/types/eligible_coin_spends.py b/chia/types/eligible_coin_spends.py index b14b1612adb2..fdc27c8bc447 100644 --- a/chia/types/eligible_coin_spends.py +++ b/chia/types/eligible_coin_spends.py @@ -98,7 +98,7 @@ def perform_the_fast_forward( Returns: CoinSpend: the new coin spend after performing the fast forward - List[Coin]: the updated additions that point to the new coin to spend + list[Coin]: the updated additions that point to the new coin to spend Raises: ValueError if none of the 
additions are considered to be the singleton's @@ -158,9 +158,9 @@ def get_deduplication_info( max_cost: the maximum limit when running for cost Returns: - List[CoinSpend]: list of unique coin spends in this mempool item + list[CoinSpend]: list of unique coin spends in this mempool item uint64: the cost we're saving by deduplicating eligible coins - List[Coin]: list of unique additions in this mempool item + list[Coin]: list of unique additions in this mempool item Raises: ValueError to skip the mempool item we're currently in, if it's diff --git a/chia/util/augmented_chain.py b/chia/util/augmented_chain.py index fd4fde1fbdb7..073a8d427456 100644 --- a/chia/util/augmented_chain.py +++ b/chia/util/augmented_chain.py @@ -45,6 +45,11 @@ def add_extra_block(self, block: FullBlock, block_record: BlockRecord) -> None: self._extra_blocks[block_record.header_hash] = (block, block_record) self._height_to_hash[block_record.height] = block_record.header_hash + def remove_extra_block(self, hh: bytes32) -> None: + if hh in self._extra_blocks: + block_record = self._extra_blocks.pop(hh)[1] + del self._height_to_hash[block_record.height] + # BlocksProtocol async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]: diff --git a/chia/util/db_wrapper.py b/chia/util/db_wrapper.py index 4453fc4d747e..99176aa3a794 100644 --- a/chia/util/db_wrapper.py +++ b/chia/util/db_wrapper.py @@ -114,8 +114,7 @@ def get_host_parameter_limit() -> int: if sys.version_info >= (3, 11): connection = sqlite3.connect(":memory:") - # sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER exists in 3.11, pylint - limit_number = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER # pylint: disable=E1101 + limit_number = sqlite3.SQLITE_LIMIT_VARIABLE_NUMBER host_parameter_limit = connection.getlimit(limit_number) else: # guessing based on defaults, seems you can't query diff --git a/chia/util/dump_keyring.py b/chia/util/dump_keyring.py index e4c9b2a3004d..11b8831d3c7f 100755 --- 
a/chia/util/dump_keyring.py +++ b/chia/util/dump_keyring.py @@ -86,7 +86,7 @@ def dump(keyring_file: str, full_payload: bool, passphrase_file: Optional[TextIO def main() -> None: colorama.init() - dump() # pylint: disable=no-value-for-parameter + dump() if __name__ == "__main__": diff --git a/chia/util/file_keyring.py b/chia/util/file_keyring.py index a5fbfdedbda1..e8148e6c123a 100644 --- a/chia/util/file_keyring.py +++ b/chia/util/file_keyring.py @@ -6,7 +6,6 @@ import contextlib import os import shutil -import sys import threading from collections.abc import Iterator from dataclasses import asdict, dataclass, field @@ -55,18 +54,6 @@ def symmetric_key_from_passphrase(passphrase: str, salt: bytes) -> bytes: return pbkdf2_hmac("sha256", passphrase.encode(), salt, HASH_ITERS) -def get_symmetric_key(salt: bytes) -> bytes: - from chia.cmds.passphrase_funcs import obtain_current_passphrase - - try: - passphrase = obtain_current_passphrase(use_passphrase_cache=True) - except Exception as e: - print(f"Unable to unlock the keyring: {e}") - sys.exit(1) - - return symmetric_key_from_passphrase(passphrase, salt) - - def encrypt_data(input_data: bytes, key: bytes, nonce: bytes) -> bytes: encryptor = ChaCha20Poly1305(key) data = encryptor.encrypt(nonce, CHECKBYTES_VALUE + input_data, None) diff --git a/chia/full_node/util/safe_cancel_task.py b/chia/util/safe_cancel_task.py similarity index 95% rename from chia/full_node/util/safe_cancel_task.py rename to chia/util/safe_cancel_task.py index 2fb08772f154..b029e31ad1b4 100644 --- a/chia/full_node/util/safe_cancel_task.py +++ b/chia/util/safe_cancel_task.py @@ -1,3 +1,5 @@ +# Package: utils + from __future__ import annotations import asyncio diff --git a/chia/util/streamable.py b/chia/util/streamable.py index 6ddde747f7d9..3b4684789185 100644 --- a/chia/util/streamable.py +++ b/chia/util/streamable.py @@ -530,8 +530,8 @@ class Streamable: An item is one of: * primitive - * Tuple[item1, .. itemx] - * List[item1, .. 
itemx] + * tuple[item1, .. itemx] + * list[item1, .. itemx] * Optional[item] * Custom item diff --git a/chia/util/task_timing.py b/chia/util/task_timing.py index 3ebce35a6779..e951afb10a4b 100644 --- a/chia/util/task_timing.py +++ b/chia/util/task_timing.py @@ -128,7 +128,7 @@ def __init__(self, name: str, file: str) -> None: def get_stack(frame: FrameType) -> str: ret = "" code = frame.f_code - while code.co_flags & inspect.CO_COROUTINE: # pylint: disable=no-member + while code.co_flags & inspect.CO_COROUTINE: ret = f"/{code.co_name}{ret}" if frame.f_back is None: break @@ -162,7 +162,7 @@ def trace_fun(frame: FrameType, event: str, arg: Any) -> None: return # we only care about instrumenting co-routines - if (frame.f_code.co_flags & inspect.CO_COROUTINE) == 0: # pylint: disable=no-member + if (frame.f_code.co_flags & inspect.CO_COROUTINE) == 0: # with open("instrumentation.log", "a") as f: # f.write(f"[1] {event} {get_fun(frame)}\n") return diff --git a/chia/util/virtual_project_analysis.py b/chia/util/virtual_project_analysis.py index 6422dbe74f16..d5901fd34ba8 100644 --- a/chia/util/virtual_project_analysis.py +++ b/chia/util/virtual_project_analysis.py @@ -1,4 +1,4 @@ -# Package: utils +# Package: virtual_project_analysis from __future__ import annotations diff --git a/chia/wallet/dao_wallet/dao_wallet.py b/chia/wallet/dao_wallet/dao_wallet.py index 2b04cf4dea61..03b76aaca93e 100644 --- a/chia/wallet/dao_wallet/dao_wallet.py +++ b/chia/wallet/dao_wallet/dao_wallet.py @@ -1473,7 +1473,6 @@ async def create_proposal_close_spend( spend_bundle = WalletSpendBundle([proposal_cs, treasury_cs], AugSchemeMPL.aggregate([])) else: # TODO: maybe we can refactor this to provide clarity around timer_cs having been defined - # pylint: disable-next=E0606 spend_bundle = WalletSpendBundle([proposal_cs, timer_cs, treasury_cs], AugSchemeMPL.aggregate([])) if fee > 0: await self.standard_wallet.create_tandem_xch_tx(fee, action_scope) diff --git 
a/chia/wallet/nft_wallet/nft_puzzles.py b/chia/wallet/nft_wallet/nft_puzzles.py index 90dc670e7570..94e88a0d38b0 100644 --- a/chia/wallet/nft_wallet/nft_puzzles.py +++ b/chia/wallet/nft_wallet/nft_puzzles.py @@ -98,13 +98,13 @@ async def get_nft_info_from_puzzle(nft_coin_info: NFTCoinInfo, config: dict[str, assert uncurried_nft is not None data_uris: list[str] = [] - for uri in uncurried_nft.data_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.data_uris.as_python(): data_uris.append(str(uri, "utf-8")) meta_uris: list[str] = [] - for uri in uncurried_nft.meta_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.meta_uris.as_python(): meta_uris.append(str(uri, "utf-8")) license_uris: list[str] = [] - for uri in uncurried_nft.license_uris.as_python(): # pylint: disable=E1133 + for uri in uncurried_nft.license_uris.as_python(): license_uris.append(str(uri, "utf-8")) off_chain_metadata: Optional[str] = None nft_info = NFTInfo( diff --git a/chia/wallet/puzzles/clawback/drivers.py b/chia/wallet/puzzles/clawback/drivers.py index acf63ef0db7f..fcc9875aee72 100644 --- a/chia/wallet/puzzles/clawback/drivers.py +++ b/chia/wallet/puzzles/clawback/drivers.py @@ -76,7 +76,7 @@ def create_merkle_proof(merkle_tree: MerkleTree, puzzle_hash: bytes32) -> Progra To spend a p2_1_of_n clawback we recreate the full merkle tree The required proof is then selected from the merkle tree based on the puzzle_hash of the puzzle we want to execute - Returns a proof: (int, List[bytes32]) which can be provided to the p2_1_of_n solution + Returns a proof: (int, list[bytes32]) which can be provided to the p2_1_of_n solution """ proof = merkle_tree.generate_proof(puzzle_hash) program: Program = Program.to((proof[0], proof[1][0])) diff --git a/chia/wallet/trade_manager.py b/chia/wallet/trade_manager.py index b29c89a6ca99..cac727ad22e8 100644 --- a/chia/wallet/trade_manager.py +++ b/chia/wallet/trade_manager.py @@ -72,20 +72,20 @@ class TradeManager: { "coin": bytes 
"parent_spend": bytes - "siblings": List[bytes] # other coins of the same type being offered - "sibling_spends": List[bytes] # The parent spends for the siblings - "sibling_puzzles": List[Program] # The inner puzzles of the siblings (always OFFER_MOD) - "sibling_solutions": List[Program] # The inner solution of the siblings + "siblings": list[bytes] # other coins of the same type being offered + "sibling_spends": list[bytes] # The parent spends for the siblings + "sibling_puzzles": list[Program] # The inner puzzles of the siblings (always OFFER_MOD) + "sibling_solutions": list[Program] # The inner solution of the siblings } ) Wallet: - Segments in this code that call general wallet methods are highlighted by comments: # ATTENTION: new wallets - To be able to be traded, a wallet must implement these methods on itself: - - generate_signed_transaction(...) -> List[TransactionRecord] (See cat_wallet.py for full API) + - generate_signed_transaction(...) -> list[TransactionRecord] (See cat_wallet.py for full API) - convert_puzzle_hash(puzzle_hash: bytes32) -> bytes32 # Converts a puzzlehash from outer to inner puzzle - get_puzzle_info(asset_id: bytes32) -> PuzzleInfo - - get_coins_to_offer(asset_id: bytes32, amount: uint64) -> Set[Coin] + - get_coins_to_offer(asset_id: bytes32, amount: uint64) -> set[Coin] - If you would like assets from your wallet to be referenced with just a wallet ID, you must also implement: - get_asset_id() -> bytes32 - Finally, you must make sure that your wallet will respond appropriately when these WSM methods are called: diff --git a/chia/wallet/util/blind_signer_tl.py b/chia/wallet/util/blind_signer_tl.py index 2d017b5bddef..be2f24470e0c 100644 --- a/chia/wallet/util/blind_signer_tl.py +++ b/chia/wallet/util/blind_signer_tl.py @@ -17,9 +17,6 @@ ) from chia.wallet.util.clvm_streamable import TranslationLayer, TranslationLayerMapping, clvm_streamable -# Pylint doesn't understand that these classes are in fact dataclasses -# pylint: 
disable=invalid-field-call - @clvm_streamable @dataclass(frozen=True) diff --git a/chia/wallet/util/debug_spend_bundle.py b/chia/wallet/util/debug_spend_bundle.py index a6a836e8811a..7247ecca0a4d 100644 --- a/chia/wallet/util/debug_spend_bundle.py +++ b/chia/wallet/util/debug_spend_bundle.py @@ -12,7 +12,7 @@ from chia.util.hash import std_hash from chia.wallet.uncurried_puzzle import UncurriedPuzzle -CONDITIONS = {k: bytes(v)[0] for k, v in ConditionOpcode.__members__.items()} # pylint: disable=E1101 +CONDITIONS = {opcode.name: opcode.value[0] for opcode in ConditionOpcode} KFA = {v: k for k, v in CONDITIONS.items()} diff --git a/chia/wallet/util/merkle_tree.py b/chia/wallet/util/merkle_tree.py index 66458c01bbf8..545593eb4787 100644 --- a/chia/wallet/util/merkle_tree.py +++ b/chia/wallet/util/merkle_tree.py @@ -76,14 +76,14 @@ def _proof( if first_hash[0] is not None: final_list = first_hash[1] # TODO: handle hints - # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr] + # error: Item "None" of "Optional[list[bytes32]]" has no attribute "append" [union-attr] final_list.append(rest_hash[2]) # type: ignore[union-attr] bit_num = first_hash[3] final_path = first_hash[0] elif rest_hash[0] is not None: final_list = rest_hash[1] # TODO: handle hints - # error: Item "None" of "Optional[List[bytes32]]" has no attribute "append" [union-attr] + # error: Item "None" of "Optional[list[bytes32]]" has no attribute "append" [union-attr] final_list.append(first_hash[2]) # type: ignore[union-attr] bit_num = rest_hash[3] # TODO: handle hints diff --git a/chia/wallet/util/merkle_utils.py b/chia/wallet/util/merkle_utils.py index 960e8738dd5e..3df8c4767429 100644 --- a/chia/wallet/util/merkle_utils.py +++ b/chia/wallet/util/merkle_utils.py @@ -5,8 +5,8 @@ from chia.types.blockchain_format.sized_bytes import bytes32 -TupleTree = Any # Union[bytes32, Tuple["TupleTree", "TupleTree"]] -Proof_Tree_Type = Any # Union[bytes32, Tuple[bytes32, 
"Proof_Tree_Type"]] +TupleTree = Any # Union[bytes32, tuple["TupleTree", "TupleTree"]] +Proof_Tree_Type = Any # Union[bytes32, tuple[bytes32, "Proof_Tree_Type"]] HASH_TREE_PREFIX = bytes([2]) diff --git a/chia/wallet/wallet.py b/chia/wallet/wallet.py index 58ebbeb67885..426c5e75ffd0 100644 --- a/chia/wallet/wallet.py +++ b/chia/wallet/wallet.py @@ -501,7 +501,7 @@ async def get_coins_to_offer( # WSChiaConnection is only imported for type checking async def coin_added( self, coin: Coin, height: uint32, peer: WSChiaConnection, coin_data: Optional[Streamable] - ) -> None: # pylint: disable=used-before-assignment + ) -> None: pass def get_name(self) -> str: diff --git a/chia/wallet/wallet_protocol.py b/chia/wallet/wallet_protocol.py index 93c569f731a9..e80427d3573a 100644 --- a/chia/wallet/wallet_protocol.py +++ b/chia/wallet/wallet_protocol.py @@ -58,9 +58,7 @@ def get_name(self) -> str: ... async def match_hinted_coin(self, coin: Coin, hint: bytes32) -> bool: ... wallet_info: WalletInfo - # WalletStateManager is only imported for type hinting thus leaving pylint - # unable to process this - wallet_state_manager: WalletStateManager # pylint: disable=used-before-assignment + wallet_state_manager: WalletStateManager class GSTOptionalArgs(TypedDict): diff --git a/chia/wallet/wallet_state_manager.py b/chia/wallet/wallet_state_manager.py index f614287bbb62..6190ed568c26 100644 --- a/chia/wallet/wallet_state_manager.py +++ b/chia/wallet/wallet_state_manager.py @@ -721,7 +721,7 @@ async def get_unconfirmed_balance( Returns the balance, including coinbase rewards that are not spendable, and unconfirmed transactions. """ - # This API should change so that get_balance_from_coin_records is called for Set[WalletCoinRecord] + # This API should change so that get_balance_from_coin_records is called for set[WalletCoinRecord] # and this method is called only for the unspent_coin_records==None case. 
if unspent_coin_records is None: wallet_type: WalletType = self.wallets[uint32(wallet_id)].type() diff --git a/poetry.lock b/poetry.lock index db972303ed49..2093a1f2c0ff 100644 --- a/poetry.lock +++ b/poetry.lock @@ -255,20 +255,6 @@ cffi = ">=1.0.1" dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] -[[package]] -name = "astroid" -version = "3.2.4" -description = "An abstract syntax tree for Python with inference support." -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.2.4-py3-none-any.whl", hash = "sha256:413658a61eeca6202a59231abb473f932038fbcbf1666587f66d482083413a25"}, - {file = "astroid-3.2.4.tar.gz", hash = "sha256:0e14202810b30da1b735827f78f5157be2bbd4a7a59b7707ca0bfc2fb4c0063a"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - [[package]] name = "async-timeout" version = "4.0.3" @@ -810,7 +796,7 @@ typing-extensions = "*" type = "git" url = "https://github.com/chia-network/chia_rs" reference = "merkle_blob" -resolved_reference = "7881538063cbd2c0fc4609f27c058714125ad026" +resolved_reference = "3eafa7f3243e991e78fd0f78bab9bf1ea9bcf179" subdirectory = "wheel/" [[package]] @@ -1012,17 +998,18 @@ dev = ["pytest"] [[package]] name = "clvm-tools-rs" -version = "0.1.43" +version = "0.1.45" description = "tools for working with chialisp language; compiler, repl, python and wasm bindings" optional = false python-versions = "*" files = [ - {file = "clvm_tools_rs-0.1.43-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:0dc68bdc7704d502d0193a9634764fffd2d618207b4a0260dbb32938881dad6c"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-macosx_11_0_x86_64.whl", hash = "sha256:49f5065a64a560e9d5ffaf5d30f074cf65a1196a2d9c554724bfff646a8697cc"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbc10526fc6abd606d337f93ca89ea52e95e390134d6d15620a3ad9d1a122ba5"}, - {file = 
"clvm_tools_rs-0.1.43-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3548f9c870e20c1dcdf90820046803d3a9b487a12c0c5d0563031ae7677e64a8"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0d905bb57c3fca1e9b227ed233974a630ac912929091287800c82fdd6a51150a"}, - {file = "clvm_tools_rs-0.1.43-cp38-abi3-win_amd64.whl", hash = "sha256:423915b4098d38112ed8e7b8fcac1eafacb7fb2ac11cf5c371d7853a85577d4f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc5a60fa6bbbfb8875258266b82251c4cc9279e3d6435eaaada0b2c033816b1a"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-macosx_13_0_x86_64.whl", hash = "sha256:d1cd3240fdb9560c5cc860ac15196a00b70935827018deacf01651fcd9bfa6e5"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fe0dcb7cad0edd1c9b1f7b0e3e6864cea1e13c3f8a6667de83c4721ff5626b4f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:792fb5be40d317e32b86e44f9a3bbbf4d96827b304877e678eaff0abca212b3e"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:079b849e0c289b6c8e7e93c027d9ee8083714ad07c5d19a1c0f7bd88efa9615f"}, + {file = "clvm_tools_rs-0.1.45-cp38-abi3-win_amd64.whl", hash = "sha256:3153335cafd180a6a308fef0d3808777fb5f7e13edd43e9ee344afd212e05516"}, + {file = "clvm_tools_rs-0.1.45.tar.gz", hash = "sha256:73848e0504e42bbd9627497c7307147cce9a04678936b5aec5c5a6be4b372c84"}, ] [[package]] @@ -1069,83 +1056,73 @@ portalocker = ">=1.6.0" [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.4" description = "Code coverage measurement for Python" optional = true -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = 
"coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = 
"coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f8ae553cba74085db385d489c7a792ad66f7f9ba2ee85bfa508aeb84cf0ba07"}, + {file = "coverage-7.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8165b796df0bd42e10527a3f493c592ba494f16ef3c8b531288e3d0d72c1f6f0"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c8b95bf47db6d19096a5e052ffca0a05f335bc63cef281a6e8fe864d450a72"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8ed9281d1b52628e81393f5eaee24a45cbd64965f41857559c2b7ff19385df51"}, + {file = "coverage-7.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0809082ee480bb8f7416507538243c8863ac74fd8a5d2485c46f0f7499f2b491"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d541423cdd416b78626b55f123412fcf979d22a2c39fce251b350de38c15c15b"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58809e238a8a12a625c70450b48e8767cff9eb67c62e6154a642b21ddf79baea"}, + {file = "coverage-7.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c9b8e184898ed014884ca84c70562b4a82cbc63b044d366fedc68bc2b2f3394a"}, + {file = "coverage-7.6.4-cp310-cp310-win32.whl", hash = "sha256:6bd818b7ea14bc6e1f06e241e8234508b21edf1b242d49831831a9450e2f35fa"}, + {file = "coverage-7.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:06babbb8f4e74b063dbaeb74ad68dfce9186c595a15f11f5d5683f748fa1d172"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:73d2b73584446e66ee633eaad1a56aad577c077f46c35ca3283cd687b7715b0b"}, + {file = "coverage-7.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51b44306032045b383a7a8a2c13878de375117946d68dcb54308111f39775a25"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3fb02fe73bed561fa12d279a417b432e5b50fe03e8d663d61b3d5990f29546"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed8fe9189d2beb6edc14d3ad19800626e1d9f2d975e436f84e19efb7fa19469b"}, + {file = "coverage-7.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b369ead6527d025a0fe7bd3864e46dbee3aa8f652d48df6174f8d0bac9e26e0e"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:ade3ca1e5f0ff46b678b66201f7ff477e8fa11fb537f3b55c3f0568fbfe6e718"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:27fb4a050aaf18772db513091c9c13f6cb94ed40eacdef8dad8411d92d9992db"}, + {file = "coverage-7.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4f704f0998911abf728a7783799444fcbbe8261c4a6c166f667937ae6a8aa522"}, + {file = "coverage-7.6.4-cp311-cp311-win32.whl", hash = "sha256:29155cd511ee058e260db648b6182c419422a0d2e9a4fa44501898cf918866cf"}, + {file = "coverage-7.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:8902dd6a30173d4ef09954bfcb24b5d7b5190cf14a43170e386979651e09ba19"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:12394842a3a8affa3ba62b0d4ab7e9e210c5e366fbac3e8b2a68636fb19892c2"}, + {file = "coverage-7.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2b6b4c83d8e8ea79f27ab80778c19bc037759aea298da4b56621f4474ffeb117"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d5b8007f81b88696d06f7df0cb9af0d3b835fe0c8dbf489bad70b45f0e45613"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b57b768feb866f44eeed9f46975f3d6406380275c5ddfe22f531a2bf187eda27"}, + {file = "coverage-7.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5915fcdec0e54ee229926868e9b08586376cae1f5faa9bbaf8faf3561b393d52"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0b58c672d14f16ed92a48db984612f5ce3836ae7d72cdd161001cc54512571f2"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:2fdef0d83a2d08d69b1f2210a93c416d54e14d9eb398f6ab2f0a209433db19e1"}, + {file = "coverage-7.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cf717ee42012be8c0cb205dbbf18ffa9003c4cbf4ad078db47b95e10748eec5"}, + {file = 
"coverage-7.6.4-cp312-cp312-win32.whl", hash = "sha256:7bb92c539a624cf86296dd0c68cd5cc286c9eef2d0c3b8b192b604ce9de20a17"}, + {file = "coverage-7.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:1032e178b76a4e2b5b32e19d0fd0abbce4b58e77a1ca695820d10e491fa32b08"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:023bf8ee3ec6d35af9c1c6ccc1d18fa69afa1cb29eaac57cb064dbb262a517f9"}, + {file = "coverage-7.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b0ac3d42cb51c4b12df9c5f0dd2f13a4f24f01943627120ec4d293c9181219ba"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8fe4984b431f8621ca53d9380901f62bfb54ff759a1348cd140490ada7b693c"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5fbd612f8a091954a0c8dd4c0b571b973487277d26476f8480bfa4b2a65b5d06"}, + {file = "coverage-7.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dacbc52de979f2823a819571f2e3a350a7e36b8cb7484cdb1e289bceaf35305f"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:dab4d16dfef34b185032580e2f2f89253d302facba093d5fa9dbe04f569c4f4b"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:862264b12ebb65ad8d863d51f17758b1684560b66ab02770d4f0baf2ff75da21"}, + {file = "coverage-7.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5beb1ee382ad32afe424097de57134175fea3faf847b9af002cc7895be4e2a5a"}, + {file = "coverage-7.6.4-cp313-cp313-win32.whl", hash = "sha256:bf20494da9653f6410213424f5f8ad0ed885e01f7e8e59811f572bdb20b8972e"}, + {file = "coverage-7.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:182e6cd5c040cec0a1c8d415a87b67ed01193ed9ad458ee427741c7d8513d963"}, + {file = "coverage-7.6.4-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a181e99301a0ae128493a24cfe5cfb5b488c4e0bf2f8702091473d033494d04f"}, + 
{file = "coverage-7.6.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:df57bdbeffe694e7842092c5e2e0bc80fff7f43379d465f932ef36f027179806"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bcd1069e710600e8e4cf27f65c90c7843fa8edfb4520fb0ccb88894cad08b11"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:99b41d18e6b2a48ba949418db48159d7a2e81c5cc290fc934b7d2380515bd0e3"}, + {file = "coverage-7.6.4-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6b1e54712ba3474f34b7ef7a41e65bd9037ad47916ccb1cc78769bae324c01a"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53d202fd109416ce011578f321460795abfe10bb901b883cafd9b3ef851bacfc"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:c48167910a8f644671de9f2083a23630fbf7a1cb70ce939440cd3328e0919f70"}, + {file = "coverage-7.6.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cc8ff50b50ce532de2fa7a7daae9dd12f0a699bfcd47f20945364e5c31799fef"}, + {file = "coverage-7.6.4-cp313-cp313t-win32.whl", hash = "sha256:b8d3a03d9bfcaf5b0141d07a88456bb6a4c3ce55c080712fec8418ef3610230e"}, + {file = "coverage-7.6.4-cp313-cp313t-win_amd64.whl", hash = "sha256:f3ddf056d3ebcf6ce47bdaf56142af51bb7fad09e4af310241e9db7a3a8022e1"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cb7fa111d21a6b55cbf633039f7bc2749e74932e3aa7cb7333f675a58a58bf3"}, + {file = "coverage-7.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11a223a14e91a4693d2d0755c7a043db43d96a7450b4f356d506c2562c48642c"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a413a096c4cbac202433c850ee43fa326d2e871b24554da8327b01632673a076"}, + {file = 
"coverage-7.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00a1d69c112ff5149cabe60d2e2ee948752c975d95f1e1096742e6077affd376"}, + {file = "coverage-7.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f76846299ba5c54d12c91d776d9605ae33f8ae2b9d1d3c3703cf2db1a67f2c0"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fe439416eb6380de434886b00c859304338f8b19f6f54811984f3420a2e03858"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0294ca37f1ba500667b1aef631e48d875ced93ad5e06fa665a3295bdd1d95111"}, + {file = "coverage-7.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:6f01ba56b1c0e9d149f9ac85a2f999724895229eb36bd997b61e62999e9b0901"}, + {file = "coverage-7.6.4-cp39-cp39-win32.whl", hash = "sha256:bc66f0bf1d7730a17430a50163bb264ba9ded56739112368ba985ddaa9c3bd09"}, + {file = "coverage-7.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:c481b47f6b5845064c65a7bc78bc0860e635a9b055af0df46fdf1c58cebf8e8f"}, + {file = "coverage-7.6.4-pp39.pp310-none-any.whl", hash = "sha256:3c65d37f3a9ebb703e710befdc489a38683a5b152242664b973a7b7b22348a4e"}, + {file = "coverage-7.6.4.tar.gz", hash = "sha256:29fc0f17b1d3fea332f8001d4558f8214af7f1d87a345f3a133c901d60347c73"}, ] [package.dependencies] @@ -1223,20 +1200,6 @@ Pygments = ">=2.9.0,<3.0.0" [package.extras] toml = ["tomli (>=1.2.1)"] -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = true -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - [[package]] name = "distlib" version = "0.3.7" @@ -2284,49 +2247,33 @@ virtualenv = 
">=20.10.0" [[package]] name = "psutil" -version = "5.9.4" +version = "6.1.0" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, - {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, - {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, - {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, - {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, - {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, - {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, - {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, - {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = 
"sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, - {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "psutil" -version = "5.9.4" -description = "Cross-platform lib for process and system monitoring in Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "psutil-5.9.4-cp37-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:56d557d85b8464e9b8ace0995705a146225b94bdad8fa52f266beae0c3211ca3"}, - {file = "psutil-5.9.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a21eb32d5ef5782a29b02b06912336a211d9665d6f6de39b0928c7f0481d1e16"}, - {file = "psutil-5.9.4-cp38-abi3-macosx_10_14_x86_64.whl", hash = "sha256:6a8d6bfca2cdb7d52299446a13985ceaa889be39d5701410c21be567e32c3ede"}, + {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, + {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, + {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, + {file = "psutil-6.1.0-cp27-none-win32.whl", hash = 
"sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, + {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, + {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, + {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, + {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, + {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, + {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, + {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, + {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[package.source] -type = "legacy" -url = "https://pypi.chia.net/simple" -reference = "chia" +dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", 
"requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "py3createtorrent" @@ -2481,36 +2428,6 @@ importlib-metadata = {version = ">=4.6", markers = "python_version < \"3.10\""} packaging = ">=22.0" setuptools = ">=42.0.0" -[[package]] -name = "pylint" -version = "3.2.6" -description = "python code static checker" -optional = true -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.2.6-py3-none-any.whl", hash = "sha256:03c8e3baa1d9fb995b12c1dbe00aa6c4bcef210c2a2634374aedeb22fb4a8f8f"}, - {file = "pylint-3.2.6.tar.gz", hash = "sha256:a5d01678349454806cff6d886fb072294f56a58c4761278c97fb557d708e1eb3"}, -] - -[package.dependencies] -astroid = ">=3.2.4,<=3.3.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - [[package]] name = "pyproject-hooks" version = "1.0.0" @@ -2763,6 +2680,33 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "ruff" +version = "0.7.1" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, + {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, + {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, + {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, + {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, + {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, + {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, +] + [[package]] name = "s3transfer" version = "0.10.1" @@ -2981,17 +2925,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "tomlkit" -version = "0.12.1" -description = "Style preserving TOML library" -optional = true -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.1-py3-none-any.whl", hash = "sha256:712cbd236609acc6a3e2e97253dfc52d4c2082982a88f61b640ecf0817eab899"}, - {file = "tomlkit-0.12.1.tar.gz", hash = "sha256:38e1ff8edb991273ec9f6181244a6a391ac30e9f5098e7535640ea6be97a7c86"}, -] - [[package]] name = "types-aiofiles" version = "23.2.0.20240311" @@ -3412,11 +3345,11 @@ url = "https://pypi.chia.net/simple" reference = "chia" [extras] -dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "lxml", "mypy", "pre-commit", "pre-commit", "py3createtorrent", "pyinstaller", "pytest", 
"pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "pyupgrade", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools"] legacy-keyring = ["keyrings.cryptfile"] upnp = ["miniupnpc"] [metadata] lock-version = "2.0" python-versions = ">=3.9, <3.13" -content-hash = "fccc8e224fd9ca1c12ac374e42b54628418e811730a15fc3f0d2111a9a869cbf" +content-hash = "70aa7d6f44d6c3cf746ddedf48d9f2cd538860a15fca3b4a490bc65c2578bf52" diff --git a/pylintrc b/pylintrc deleted file mode 100644 index 7913c3742373..000000000000 --- a/pylintrc +++ /dev/null @@ -1,583 +0,0 @@ -[MASTER] -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code -extension-pkg-allow-list=lxml,zstd - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. -jobs=4 - -# List of plugins (as comma separated values of python modules names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# Specify a configuration file. -#rcfile= - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. 
Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once).You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use"--disable=all --enable=classes -# --disable=W" -disable=locally-disabled, - suppressed-message, - import-error, - abstract-method, - arguments-differ, - arguments-renamed, - attribute-defined-outside-init, - bare-except, - broad-exception-caught, - broad-exception-raised, - cell-var-from-loop, - chained-comparison, - consider-iterating-dictionary, - consider-using-enumerate, - consider-using-in, - consider-using-with, - cyclic-import, - dangerous-default-value, - duplicate-code, - duplicate-key, - expression-not-assigned, - fixme, - global-statement, - global-variable-not-assigned, - implicit-str-concat, - import-outside-toplevel, - inconsistent-return-statements, - invalid-name, - keyword-arg-before-vararg, - line-too-long, - logging-fstring-interpolation, - logging-not-lazy, - lost-exception, - missing-class-docstring, - missing-function-docstring, - missing-module-docstring, - no-else-break, - no-else-continue, - no-else-raise, - no-else-return, - pointless-statement, - pointless-string-statement, - protected-access, - raise-missing-from, - raising-format-tuple, - redefined-argument-from-local, - redefined-builtin, - redefined-outer-name, - reimported, - return-in-finally, - simplifiable-condition, - simplifiable-if-expression, - simplifiable-if-statement, - singleton-comparison, - subprocess-run-check, - 
superfluous-parens, - too-few-public-methods, - too-many-ancestors, - too-many-arguments, - too-many-boolean-expressions, - too-many-branches, - too-many-instance-attributes, - too-many-lines, - too-many-locals, - too-many-nested-blocks, - too-many-public-methods, - too-many-return-statements, - too-many-statements, - try-except-raise, - typevar-name-incorrect-variance, - unbalanced-tuple-unpacking, - undefined-loop-variable, - ungrouped-imports, - unidiomatic-typecheck, - unknown-option-value, - unnecessary-comprehension, - unnecessary-dict-index-lookup, - unnecessary-dunder-call, - unnecessary-ellipsis, - unnecessary-lambda, - unnecessary-lambda-assignment, - unnecessary-negation, - unnecessary-pass, - unspecified-encoding, - unused-argument, - unused-import, - unused-variable, - unused-wildcard-import, - use-a-generator, - use-dict-literal, - use-implicit-booleaness-not-comparison, - use-implicit-booleaness-not-len, - useless-else-on-loop, - useless-import-alias, - useless-return, - use-list-literal, - wildcard-import, - wrong-import-order, - wrong-import-position, - multiple-statements, - contextmanager-generator-missing-cleanup, - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a note less than 10 (10 is the highest -# note). You have access to the variables errors warning, statement which -# respectively contain the number of errors / warnings messages and the total -# number of statements analyzed. This is used by the global evaluation report -# (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. 
This is a python new-style format string -# used to format the message information. See doc for all details -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio).You can also give a reporter class, eg -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages -reports=no - -# Activate the evaluation score. -score=no - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=optparse.Values,sys.exit - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid to define new builtins when possible. -additional-builtins=cls - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expectedly -# not used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=no - -# List of qualified module names which can have objects that can redefine -# builtins. 
-redefining-builtins-modules=six.moves,past.builtins,future.builtins - - -[LOGGING] - -# Logging modules to check that the string format arguments are in logging -# function parameter format -logging-modules=logging - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis. It -# supports qualified module names, as well as Unix pattern matching. 
-ignored-modules=chiabip158, - chiapos, - chiavdf, - chia_rs, - cryptography, - aiohttp, - keyring, - bitstring, - clvm_tools, - clvm_tools_rs, - setproctitle, - clvm, - colorlog, - concurrent_log_handler, - aiosqlite, - sortedcontainers, - aiter, - miniupnpc, - pytest, - setuptools_scm - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - - -[BASIC] - -# Naming style matching correct argument names -argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style -#argument-rgx= - -# Naming style matching correct attribute names -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Naming style matching correct class attribute names -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style -#class-attribute-rgx= - -# Naming style matching correct class names -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming-style -#class-rgx= - -# Naming style matching correct constant names -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. 
Overrides const-naming- -# style -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma -good-names=i, - j, - k, - ex, - Run, - _ - -# Include a hint for the correct naming format with invalid-name -include-naming-hint=no - -# Naming style matching correct inline iteration names -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style -#inlinevar-rgx= - -# Naming style matching correct method names -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style -#method-rgx= - -# Naming style matching correct module names -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style -#variable-rgx= - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. 
To make it working -# install python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains private dictionary; one word per line. -spelling-private-dict-file= - -# Tells whether to store unknown words to indicated private dictionary in -# --spelling-private-dict-file option instead of raising a message. -spelling-store-unknown-words=no - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=120 - -# Maximum number of lines in a module -max-module-lines=1000 - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[DESIGN] - -# Maximum number of arguments for function / method -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in a if statement -max-bool-expr=5 - -# Maximum number of branch for function / method body -max-branches=12 - -# Maximum number of locals for function / method body -max-locals=15 - -# Maximum number of parents for a class (see R0901). 
-max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body -max-returns=6 - -# Maximum number of statements in function / method body -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - -# The following functions mutate other functions' signatures and need a E1120 exception -signature-mutators=chia.rpc.util.tx_endpoint - -[IMPORTS] - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma -deprecated-modules=regsub, - TERMIOS, - Bastion, - rexec - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled) -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled) -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled) -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. -known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. 
-valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=mcs - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "Exception" -overgeneral-exceptions=builtins.Exception diff --git a/pyproject.toml b/pyproject.toml index 647d3e713324..1fa46ec2af09 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -51,7 +51,7 @@ chiavdf = "1.1.8" # timelord and vdf verification click = "8.1.7" # For the CLI clvm = "0.9.10" clvm_tools = "0.4.9" # Currying Program.to other conveniences -clvm_tools_rs = "0.1.43" # Rust implementation of clvm_tools' compiler +clvm_tools_rs = "0.1.45" # Rust implementation of clvm_tools' compiler colorama = "0.4.6" # Colorizes terminal output colorlog = "6.8.2" # Adds color to logs concurrent_log_handler = "0.9.25" # Concurrently log and rotate logs @@ -62,10 +62,7 @@ filelock = "3.15.4" # For reading and writing config multiprocess and multithre keyring = "25.2.1" # Store keys in MacOS Keychain, Windows Credential Locker packaging = "24.0" pip = "24.2" -psutil = [ - {version="5.9.4", markers="platform_machine!='aarch64'"}, - {version="5.9.4", markers="platform_machine=='aarch64'", source="chia"}, -] +psutil = "6.1.0" pyyaml = "6.0.1" # Used for config file format setproctitle = "1.3.3" # Gives the chia processes readable names setuptools = "75.1.0" @@ -81,7 +78,7 @@ hsms = "0.3.1" aiohttp_cors = { version = "0.7.0", optional = true } black = { version = "24.8.0", optional = true } build = { version = "1.2.1", optional = true } -coverage = { version = "7.6.1", optional = true } +coverage = { version = "7.6.4", optional = true } diff-cover = { version = "9.2.0", optional = true } flake8 = { version = "7.1.1", optional = true } isort = { version = "5.13.2", optional = true } @@ -91,7 +88,6 @@ mypy = { version = "1.11.1", optional = true } pre-commit = [ { version = "3.5.0", python = "<3.9", optional = true }, { version = 
"3.7.1", python = ">=3.9", optional = true } ] py3createtorrent = { version = "1.2.1", optional = true } pyinstaller = { version = "6.9.0", optional = true } -pylint = { version = "3.2.6", optional = true } pytest = { version = "8.3.3", optional = true } pytest-cov = { version = "5.0.0", optional = true } pytest-mock = { version = "3.14.0", optional = true } @@ -108,10 +104,11 @@ pyupgrade = { version = "3.16.0", optional = true } # numpy = [ # {version="1.24.4", python = "<3.9", optional = true}, # {version="1.26.4", python = ">=3.9", optional = true}] +ruff = "0.7.1" [tool.poetry.extras] -dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "mypy", "pre-commit", "py3createtorrent", "pyinstaller", "pylint", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools", "pyupgrade", "lxml"] +dev = ["aiohttp_cors", "black", "build", "coverage", "diff-cover", "flake8", "isort", "mypy", "pre-commit", "py3createtorrent", "pyinstaller", "pytest", "pytest-cov", "pytest-mock", "pytest-monitor", "pytest-xdist", "types-aiofiles", "types-cryptography", "types-pyyaml", "types-setuptools", "pyupgrade", "lxml"] upnp = ["miniupnpc"] legacy_keyring = ["keyrings.cryptfile"] @@ -158,3 +155,63 @@ include = ''' )$ ''' exclude = '' + +[tool.ruff] +line-length = 120 + +[tool.ruff.lint] +preview = true +select = ["PL"] +explicit-preview-rules = false +ignore = [ + # Pylint convention + "PLC0105", # type-name-incorrect-variance + "PLC0415", # import-outside-top-level + "PLC2801", # unnecessary-dunder-call + "PLC0206", # dict-index-missing-items + "PLC1901", # compare-to-empty-string + "PLC2701", # import-private-name + "PLC0414", # useless-import-alias + + # Pylint refactor + "PLR0915", # too-many-statements + "PLR0914", # too-many-locals + "PLR0913", # too-many-arguments + "PLR0912", # too-many-branches + "PLR1702", # too-many-nested-blocks + "PLR0904", # 
too-many-public-methods + "PLR6301", # no-self-use + "PLR0917", # too-many-positional-arguments + "PLR6201", # literal-membership + "PLR0911", # too-many-return-statements + "PLR2004", # magic-value-comparison + "PLR1714", # repeated-equality-comparison + "PLR6104", # non-augmented-assignment + "PLR1704", # redefined-argument-from-local + "PLR0916", # too-many-boolean-expressions + "PLR5501", # collapsible-else-if + "PLR1711", # useless-return + "PLR1730", # if-stmt-min-max + "PLR1736", # unnecessary-list-index-lookup + "PLR1733", # unnecessary-dict-index-lookup + + # Pylint warning + "PLW2901", # redefined-loop-name + "PLW1641", # eq-without-hash + "PLW1514", # unspecified-encoding + "PLW0602", # global-variable-not-assigned + "PLW0603", # global-statement + "PLW0108", # unnecessary-lambda + "PLW1510", # subprocess-run-without-check + "PLW0120", # useless-else-on-loop +] + +[tool.ruff.lint.pylint] +max-args = 5 +max-locals = 15 +max-returns = 6 +max-branches = 12 +max-statements = 50 +max-nested-blocks = 5 +max-public-methods = 20 +max-bool-expr = 5 diff --git a/tools/analyze-chain.py b/tools/analyze-chain.py index 3278e6898401..293f4d5a3100 100755 --- a/tools/analyze-chain.py +++ b/tools/analyze-chain.py @@ -163,5 +163,4 @@ def default_call( if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/tools/analyze_memory_profile.py b/tools/analyze_memory_profile.py index 976151ce95ee..ffe08b4532e7 100644 --- a/tools/analyze_memory_profile.py +++ b/tools/analyze_memory_profile.py @@ -211,4 +211,4 @@ def analyze_slot(ctx: click.Context, slot: int) -> None: if __name__ == "__main__": - memory_profiler() # pylint: disable = no-value-for-parameter + memory_profiler() diff --git a/tools/cpu_utilization.py b/tools/cpu_utilization.py index 9acc028b0d53..40cb24d87f9b 100644 --- a/tools/cpu_utilization.py +++ b/tools/cpu_utilization.py @@ -115,5 +115,4 @@ def main(pid: int, output: str, threads: bool) -> None: if __name__ == "__main__": - # 
pylint: disable = no-value-for-parameter main() diff --git a/tools/generate_chain.py b/tools/generate_chain.py index 87c135539f7e..4d3b0d59823b 100644 --- a/tools/generate_chain.py +++ b/tools/generate_chain.py @@ -222,5 +222,4 @@ def main(length: int, fill_rate: int, profile: bool, block_refs: bool, output: O if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main() diff --git a/tools/run_block.py b/tools/run_block.py index b8f5c7e8a0f1..2d5a9feac4cc 100644 --- a/tools/run_block.py +++ b/tools/run_block.py @@ -76,4 +76,4 @@ def get_config_and_constants(): if __name__ == "__main__": - cmd_run_json_block_file() # pylint: disable=no-value-for-parameter + cmd_run_json_block_file() diff --git a/tools/test_full_sync.py b/tools/test_full_sync.py index 4701ba8d3ab1..ef928ea667ca 100755 --- a/tools/test_full_sync.py +++ b/tools/test_full_sync.py @@ -21,6 +21,7 @@ from chia.server.ws_connection import WSChiaConnection from chia.types.full_block import FullBlock from chia.types.validation_state import ValidationState +from chia.util.augmented_chain import AugmentedBlockchain from chia.util.config import load_config @@ -165,6 +166,7 @@ async def run_sync_checkpoint( header_hash = block_batch[0].prev_header_hash success, _, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, header_hash), @@ -188,6 +190,7 @@ async def run_sync_checkpoint( fork_height = block_batch[0].height - 1 fork_header_hash = block_batch[0].prev_header_hash success, _, err = await full_node.add_block_batch( + AugmentedBlockchain(full_node.blockchain), block_batch, peer_info, ForkInfo(fork_height, fork_height, fork_header_hash), @@ -201,5 +204,4 @@ async def run_sync_checkpoint( main.add_command(analyze) if __name__ == "__main__": - # pylint: disable = no-value-for-parameter main()