diff --git a/.github/workflows/auto-release.yml b/.github/workflows/auto-release.yml index 2eb689d..b246356 100644 --- a/.github/workflows/auto-release.yml +++ b/.github/workflows/auto-release.yml @@ -32,7 +32,7 @@ jobs: - name: Setup Python uses: Chia-Network/actions/setup-python@main with: - python-version: '3' + python-version: "3" - name: Install commitizen run: | diff --git a/.github/workflows/ensure-version-increment.yml b/.github/workflows/ensure-version-increment.yml index d1270dd..6f159e4 100644 --- a/.github/workflows/ensure-version-increment.yml +++ b/.github/workflows/ensure-version-increment.yml @@ -4,7 +4,7 @@ name: Check Version Increment on: pull_request: branches: - - 'main' + - "main" concurrency: # SHA is added to the end if on `main` to let all main workflows run diff --git a/.github/workflows/release-notes.yml b/.github/workflows/release-notes.yml index 33ac428..beb73d8 100644 --- a/.github/workflows/release-notes.yml +++ b/.github/workflows/release-notes.yml @@ -3,7 +3,7 @@ name: Create release notes on: push: tags: - - '**' + - "**" permissions: contents: write @@ -27,7 +27,7 @@ jobs: - name: Setup Python uses: Chia-Network/actions/setup-python@main with: - python-version: '3' + python-version: "3" - name: Install commitizen run: | diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index aeab675..349ac58 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -7,7 +7,7 @@ on: - stage pull_request: branches: - - '**' + - "**" jobs: test: @@ -31,7 +31,7 @@ jobs: - name: Setup Python uses: Chia-Network/actions/setup-python@main with: - python-version: '3.10' + python-version: "3.10" - name: Create virtual environment uses: Chia-Network/actions/create-venv@main diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 3e5f939..1cd3b3c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,37 +1,40 @@ repos: -- hooks: - - id: end-of-file-fixer - - id: trailing-whitespace - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 -- hooks: - - id: flake8 - repo: https://github.com/pycqa/flake8 - rev: 6.1.0 -- hooks: - - args: - - --profile - - black - id: isort - repo: https://github.com/pycqa/isort - rev: 5.12.0 -- hooks: - - id: black - repo: https://github.com/psf/black - rev: 23.7.0 -- hooks: - - entry: mypy - id: mypy - language: system - name: mypy - types: - - python - - pyi - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.4.1 -- hooks: - - id: commitizen - stages: - - commit-msg - repo: https://github.com/commitizen-tools/commitizen - rev: v2.27.1 + - hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.3.0 + - hooks: + - id: flake8 + repo: https://github.com/pycqa/flake8 + rev: 6.1.0 + - hooks: + - args: + - --profile + - black + id: isort + repo: https://github.com/pycqa/isort + rev: 5.12.0 + - hooks: + - id: black + repo: https://github.com/psf/black + rev: 23.7.0 + - hooks: + - entry: mypy + id: mypy + language: system + name: mypy + pass_filenames: false + repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.11.1 + - hooks: + - id: commitizen + stages: + - commit-msg + repo: https://github.com/commitizen-tools/commitizen + rev: v2.27.1 + - hooks: + - id: prettier + types_or: [ini, json, toml, yaml, markdown] + repo: https://github.com/pre-commit/mirrors-prettier + rev: v3.1.0 diff --git a/CHANGELOG.md b/CHANGELOG.md index 557ee88..80db7bd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ 
-106,7 +106,7 @@ ### Fix -- update climate warehouse and ui parameter names +- update climate warehouse and ui parameter names - add exit code for incorrect host configuration - format host conditional - require localhost unless running in explorer mode diff --git a/README.md b/README.md index 74976b7..f20d7fb 100644 --- a/README.md +++ b/README.md @@ -5,49 +5,49 @@ This application can run in 4 modes, each providing a separate application with a distinct use case: -* **Chia Climate Tokenization**: - * Mode: Registry - * Use case: A registry would use this to tokenize carbon credits onto the Chia blockchain - * Port: 31312 - * Application Name: climate-tokenization-chia - * *Only listens on localhost for connections from the [Climate Tokenization Engine](https://github.com/Chia-Network/Climate-Tokenization-Engine)* -* **Climate Explorer**: - * Mode: Explorer - * Use case: A registry (or interested observer) would use this to track all on-chain activity related to tokenized carbon credits - * Port: 31313 - * Application Name: climate-explorer -* **Climate Token Driver**: - * Mode: Client - * Use case: A carbon token holder could use this in conjunction with the [Climate Wallet](https://github.com/Chia-Network/Climate-Wallet) to manage their tokenized carbon credits - * Port: 31314 - * Application Name: climate-token-driver -* **Dev Mode (for developers only!)**: - * Mode: Dev - * Use case: Developers are able to test the software without having to communicate with the blockchain - * Port: 31999 - * Application Name: Only available from source builds - -When compiling from source, the "mode" is controlled by the `.env` file. Each application, or mode, is offered as precompiled binaries, appropriate for most users. +- **Chia Climate Tokenization**: + - Mode: Registry + - Use case: A registry would use this to tokenize carbon credits onto the Chia blockchain + - Port: 31312 + - Application Name: climate-tokenization-chia + - Only listens on localhost for connections from the [Climate Tokenization Engine](https://github.com/Chia-Network/Climate-Tokenization-Engine) +- **Climate Explorer**: + - Mode: Explorer + - Use case: A registry (or interested observer) would use this to track all on-chain activity related to tokenized carbon credits + - Port: 31313 + - Application Name: climate-explorer +- **Climate Token Driver**: + - Mode: Client + - Use case: A carbon token holder could use this in conjunction with the [Climate Wallet](https://github.com/Chia-Network/Climate-Wallet) to manage their tokenized carbon credits + - Port: 31314 + - Application Name: climate-token-driver +- **Dev Mode (for developers only!)**: + - Mode: Dev + - Use case: Developers are able to test the software without having to communicate with the blockchain + - Port: 31999 + - Application Name: Only available from source builds + +When compiling from source, the "mode" is controlled by the `.env` file. Each application, or mode, is offered as precompiled binaries, appropriate for most users. 
## Related Projects -* [Chia Blockchain](https://github.com/Chia-Network/chia-blockchain) -* [Climate Tokenization Engine](https://github.com/Chia-Network/Climate-Tokenization-Engine) -* [Climate Tokenization Engine User Interface](https://github.com/Chia-Network/Climate-Tokenization-Engine-UI) -* [Climate Explorer UI](https://github.com/Chia-Network/climate-explorer-ui) -* [Climate Wallet](https://github.com/Chia-Network/Climate-Wallet) -* [Climate Action Data Trust](https://github.com/Chia-Network/cadt) -* [Climate Action Data Trust UI](https://github.com/Chia-Network/cadt-ui) +- [Chia Blockchain](https://github.com/Chia-Network/chia-blockchain) +- [Climate Tokenization Engine](https://github.com/Chia-Network/Climate-Tokenization-Engine) +- [Climate Tokenization Engine User Interface](https://github.com/Chia-Network/Climate-Tokenization-Engine-UI) +- [Climate Explorer UI](https://github.com/Chia-Network/climate-explorer-ui) +- [Climate Wallet](https://github.com/Chia-Network/Climate-Wallet) +- [Climate Action Data Trust](https://github.com/Chia-Network/cadt) +- [Climate Action Data Trust UI](https://github.com/Chia-Network/cadt-ui) ## Hierarchy - `app`: - - `api`: API layer implementations - - `core`: service layer implementations - - `crud`: repository layer implementations - - `db`: database utilities - - `models`: database models - - `schemas`: schemas shared by all the layers + - `api`: API layer implementations + - `core`: service layer implementations + - `crud`: repository layer implementations + - `db`: database utilities + - `models`: database models + - `schemas`: schemas shared by all the layers - `tests`: pytest suites ## Installation and configuration @@ -56,7 +56,7 @@ Precompiled executables are available for Mac, Windows, and Linux (both ARM and ### Debian-based Linux Distros -For users of Debian, Ubuntu, Mint, PopOS, and other Debian-based distributions, a .deb file is provided on the [releases](https://github.com/Chia-Network/climate-token-driver/releases) page. This can be installed with +For users of Debian, Ubuntu, Mint, PopOS, and other Debian-based distributions, a .deb file is provided on the [releases](https://github.com/Chia-Network/climate-token-driver/releases) page. This can be installed with ```sh dpkg -i package-filename.deb @@ -97,7 +97,8 @@ sudo apt-get install climate-explorer-chia sudo systemctl start climate-tokenization-chia@ sudo systemctl start climate-explorer-chia@ ``` -For ``, enter the user that Chia runs as (the user with the `.chia` directory in their home directory). For example, if the `ubuntu` is where Chia runs, start Chia Climate Tokenization with `systemctl start climate-tokenization-chia@ubuntu`. + +For ``, enter the user that Chia runs as (the user with the `.chia` directory in their home directory). For example, if the `ubuntu` is where Chia runs, start Chia Climate Tokenization with `systemctl start climate-tokenization-chia@ubuntu`. 6. Set the Chia Climate Tokenization and Climate Explorer to run at boot @@ -106,7 +107,6 @@ sudo systemctl enable climate-tokenization-chia@ sudo systemctl enable climate-explorer-chia@ ``` - ### From Source - Clone this repo. @@ -139,7 +139,8 @@ sudo systemctl enable climate-explorer-chia@ ## Configurations Note there are two steps the application loads the configurations: -1. The application will first look for any environment variables set on the host machine for `MODE`, `CHIA_ROOT`, and `CONFIG_PATH`. 
Any variables not set on the host system will be loaded from the `.env` environment file, which is opened via `python-dotenv`, where `${CHIA_ROOT}` and `${CONFIG_PATH}` are pre-loaded. This file is not visible to end users in packaged binaries, and are suitable for binary builders to change the default *flavor* for the binary (though it is overridden by system environment variables). + +1. The application will first look for any environment variables set on the host machine for `MODE`, `CHIA_ROOT`, and `CONFIG_PATH`. Any variables not set on the host system will be loaded from the `.env` environment file, which is opened via `python-dotenv`, where `${CHIA_ROOT}` and `${CONFIG_PATH}` are pre-loaded. This file is not visible to end users in packaged binaries, and are suitable for binary builders to change the default _flavor_ for the binary (though it is overridden by system environment variables). 1. Then, a `config.yaml` file located at `${CHIA_ROOT}/${CONFIG_PATH}` is loaded, which adds to the configurations after `.env`. This part of the configuration is free to change by end binary users. @@ -175,7 +176,7 @@ As with `registry` mode, `client` mode is only designed to integrate with other Only when in `explorer` mode, the following configurations are relevant: -- `CLIMATE_EXPLORER_SERVER_HOST`: Network interface to bind the climate explorer to. Default is `0.0.0.0` as the Climate Explorer is intended to be a publicly available interface. Can be set to `127.0.0.1` to be privately available only on localhost. +- `CLIMATE_EXPLORER_SERVER_HOST`: Network interface to bind the climate explorer to. Default is `0.0.0.0` as the Climate Explorer is intended to be a publicly available interface. Can be set to `127.0.0.1` to be privately available only on localhost. - `CLIMATE_EXPLORER_PORT`: 31313 by default. - `DB_PATH`: the database this application writes to, relative to `${CHIA_ROOT}`. - `BLOCK_START`: the block to start scanning for climate token activities. @@ -223,15 +224,18 @@ Only when in `explorer` mode, the following configurations are relevant: # first ensure the `MODE` is set to the `dev` for all tests to be discoverable, then python -m pytest ./tests ``` + ### Commiting [Signed commits](https://docs.github.com/en/authentication/managing-commit-signature-verification/signing-commits) are required. ​This repo uses a [commit convention](https://www.conventionalcommits.org/en/v1.0.0/). A typical commit message might read: ​ + ``` fix: correct home screen layout ``` + ​ The first part of this is the commit "type". The most common types are "feat" for new features, and "fix" for bugfixes. Using these commit types helps us correctly manage our version numbers and changelogs. Since our release process calculates new version numbers from our commits it is very important to get this right. ​ @@ -250,10 +254,9 @@ The first part of this is the commit "type". The most common types are "feat" fo After the type and scope there should be a colon. - ​ +​ The "subject" of the commit follows. It should be a short indication of the change. The commit convention prefers that this is written in the present-imperative tense. - #### Branch Layout -All pull requests should be made against the `develop` branch. Commits to the `main` branch will trigger a release, so the `main` branch is always the code in the latest release. +All pull requests should be made against the `develop` branch. Commits to the `main` branch will trigger a release, so the `main` branch is always the code in the latest release. 
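The configuration notes in the README hunk above describe a two-step load: host environment variables for `MODE`, `CHIA_ROOT`, and `CONFIG_PATH` take precedence, anything missing is filled in from the packaged `.env` via `python-dotenv`, and a `config.yaml` under `${CHIA_ROOT}/${CONFIG_PATH}` is layered on afterwards. A minimal sketch of that order follows; it assumes PyYAML and python-dotenv, the fallback values are placeholders, and the real logic in `app/config.py` may differ.

```python
import os
from pathlib import Path

import yaml  # PyYAML
from dotenv import load_dotenv  # python-dotenv

# Step 1: host environment variables win; load_dotenv() only fills in
# variables that are not already set on the host system.
load_dotenv()

chia_root = Path(os.environ.get("CHIA_ROOT", "~/.chia/mainnet")).expanduser()
config_path = os.environ.get("CONFIG_PATH", "config.yaml")  # placeholder default
mode = os.environ.get("MODE", "dev")  # placeholder default

# Step 2: config.yaml at ${CHIA_ROOT}/${CONFIG_PATH} adds to the settings
# derived from .env; this is the part end users are free to edit.
config_file = chia_root / config_path
overrides = (yaml.safe_load(config_file.read_text()) or {}) if config_file.exists() else {}
```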
diff --git a/app/api/v1/activities.py b/app/api/v1/activities.py index 067ea6d..57f8851 100644 --- a/app/api/v1/activities.py +++ b/app/api/v1/activities.py @@ -15,8 +15,9 @@ router = APIRouter() + @router.get("/", response_model=schemas.ActivitiesResponse) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def get_activity( search: Optional[str] = None, search_by: Optional[schemas.ActivitySearchBy] = None, @@ -129,12 +130,12 @@ async def get_activity( @router.get("/activity-record", response_model=schemas.ActivityRecordResponse) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def get_activity_by_cw_unit_id( - cw_unit_id: str, - coin_id: str, - action_mode: str, - db: Session = Depends(deps.get_db_session), + cw_unit_id: str, + coin_id: str, + action_mode: str, + db: Session = Depends(deps.get_db_session), ) -> schemas.ActivityRecordResponse: """Get a single activity based on the unit's unitWarehouseId. @@ -143,7 +144,6 @@ async def get_activity_by_cw_unit_id( db_crud = crud.DBCrud(db=db) - # fetch unit and related data from CADT cw_filters: Dict[str, str] = {"warehouseUnitId": cw_unit_id} @@ -162,13 +162,13 @@ async def get_activity_by_cw_unit_id( if unit_with_metadata["marketplaceIdentifier"]: activity_filters["and"].append(models.Activity.asset_id == unit_with_metadata["marketplaceIdentifier"]) else: - logger.warning(f"retrieved unit does not contain marketplace identifier. unable to get activity record") + logger.warning("retrieved unit does not contain marketplace identifier. unable to get activity record") return schemas.ActivityRecordResponse() activity_filters["and"].append(models.Activity.mode == action_mode) activity_filters["and"].append(models.Activity.coin_id == coin_id) - activities: [models.Activity] + activities = [models.Activity] total: int # fetch activities with filters, 'total' var ignored @@ -182,11 +182,13 @@ async def get_activity_by_cw_unit_id( return schemas.ActivityRecordResponse() try: - activity = next((activity for activity in activities if activity.coin_id == coin_id and activity.mode == action_mode), None) + activity = next( + (activity for activity in activities if activity.coin_id == coin_id and activity.mode == action_mode), None + ) if activity is None: return schemas.ActivityRecordResponse() - except: - logger.warning(f"an exception occurred while processing activity record") + except Exception: + logger.warning("an exception occurred while processing activity record") return schemas.ActivityRecordResponse() unit_with_metadata = unit_with_metadata.copy() diff --git a/app/api/v1/core.py b/app/api/v1/core.py index fa78d23..1adf525 100644 --- a/app/api/v1/core.py +++ b/app/api/v1/core.py @@ -4,7 +4,7 @@ from fastapi import APIRouter -from app.api.v1 import activities, cron, keys, tokens, transactions, organizations +from app.api.v1 import activities, cron, keys, organizations, tokens, transactions router = APIRouter( prefix="/v1", diff --git a/app/api/v1/cron.py b/app/api/v1/cron.py index 9858774..315d114 100644 --- a/app/api/v1/cron.py +++ b/app/api/v1/cron.py @@ -4,10 +4,10 @@ import json from typing import List, Optional -from chia_rs import G1Element from chia.consensus.block_record import BlockRecord from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.util.byte_types import hexstr_to_bytes +from chia_rs import G1Element from fastapi import 
APIRouter, HTTPException from fastapi.encoders import jsonable_encoder from fastapi_utils.tasks import repeat_every @@ -29,7 +29,7 @@ @router.on_event("startup") -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def init_db() -> None: Engine = await get_engine_cls() @@ -142,7 +142,7 @@ async def _scan_token_activity( @router.on_event("startup") @repeat_every(seconds=60, logger=logger) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def scan_token_activity() -> None: if lock.locked(): return @@ -193,7 +193,7 @@ async def _scan_blockchain_state( @router.on_event("startup") @repeat_every(seconds=10, logger=logger) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def scan_blockchain_state() -> None: async with ( deps.get_db_session_context() as db, diff --git a/app/api/v1/keys.py b/app/api/v1/keys.py index ce345d4..078a4c1 100644 --- a/app/api/v1/keys.py +++ b/app/api/v1/keys.py @@ -1,13 +1,13 @@ from typing import Optional -from chia_rs import G1Element, PrivateKey from chia.consensus.coinbase import create_puzzlehash_for_pk +from chia.rpc.wallet_request_types import GetPrivateKey from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.bech32m import decode_puzzle_hash, encode_puzzle_hash -from chia.util.byte_types import hexstr_to_bytes from chia.util.ints import uint32 from chia.wallet.derive_keys import master_sk_to_wallet_sk, master_sk_to_wallet_sk_unhardened +from chia_rs import G1Element, PrivateKey from fastapi import APIRouter, Depends from app import schemas @@ -22,18 +22,19 @@ "/", response_model=schemas.Key, ) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.EXPLORER]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.EXPLORER]) # type: ignore[misc] async def get_key( hardened: bool = False, derivation_index: int = 0, prefix: str = "bls1238", wallet_rpc_client: WalletRpcClient = Depends(deps.get_wallet_rpc_client), ) -> schemas.Key: - fingerprint: int = await wallet_rpc_client.get_logged_in_fingerprint() + fingerprint = await wallet_rpc_client.get_logged_in_fingerprint() + assert fingerprint.fingerprint is not None - result = await wallet_rpc_client.get_private_key(fingerprint) + result = await wallet_rpc_client.get_private_key(GetPrivateKey(fingerprint.fingerprint)) - secret_key = PrivateKey.from_bytes(hexstr_to_bytes(result["sk"])) + secret_key = result.private_key.sk wallet_secret_key: PrivateKey if hardened: diff --git a/app/api/v1/organizations.py b/app/api/v1/organizations.py index 68c7472..1cdfbd0 100644 --- a/app/api/v1/organizations.py +++ b/app/api/v1/organizations.py @@ -1,7 +1,6 @@ from typing import Any from fastapi import APIRouter -from fastapi.params import Depends from app import crud from app.config import ExecutionMode, settings @@ -9,12 +8,13 @@ router = APIRouter() + # pass through resource to expose organization data from cadt @router.get("/", response_model=Any) -@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.REGISTRY, ExecutionMode.CLIENT]) # type: ignore[misc] async def get_organizations() -> Any: all_organizations = crud.ClimateWareHouseCrud( url=settings.CADT_API_SERVER_HOST, api_key=settings.CADT_API_KEY, ).get_climate_organizations() - return all_organizations 
\ No newline at end of file + return all_organizations diff --git a/app/api/v1/tokens.py b/app/api/v1/tokens.py index 49927ba..a0d9b83 100644 --- a/app/api/v1/tokens.py +++ b/app/api/v1/tokens.py @@ -1,12 +1,12 @@ import json from typing import Any, Dict, Tuple -from chia_rs import G1Element, G2Element from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.types.coin_spend import CoinSpend from chia.types.spend_bundle import SpendBundle from chia.util.byte_types import hexstr_to_bytes +from chia_rs import G1Element, G2Element from fastapi import APIRouter, Depends from app import schemas @@ -25,7 +25,7 @@ "/", response_model=schemas.TokenizationTxResponse, ) -@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) # type: ignore[misc] async def create_tokenization_tx( request: schemas.TokenizationTxRequest, wallet_rpc_client: WalletRpcClient = Depends(deps.get_wallet_rpc_client), @@ -105,7 +105,7 @@ async def create_tokenization_tx( "/{asset_id}/detokenize", response_model=schemas.DetokenizationTxResponse, ) -@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) # type: ignore[misc] async def create_detokenization_tx( asset_id: str, request: schemas.DetokenizationTxRequest, @@ -145,7 +145,7 @@ async def create_detokenization_tx( "/{asset_id}/request-detokenization", response_model=schemas.DetokenizationFileResponse, ) -@disallow([ExecutionMode.EXPLORER, ExecutionMode.REGISTRY]) +@disallow([ExecutionMode.EXPLORER, ExecutionMode.REGISTRY]) # type: ignore[misc] async def create_detokenization_file( asset_id: str, request: schemas.DetokenizationFileRequest, @@ -216,7 +216,7 @@ async def create_detokenization_file( "/parse-detokenization", response_model=schemas.DetokenizationFileParseResponse, ) -@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) +@disallow([ExecutionMode.EXPLORER, ExecutionMode.CLIENT]) # type: ignore[misc] async def parse_detokenization_file( content: str, ) -> schemas.DetokenizationFileParseResponse: @@ -252,7 +252,7 @@ async def parse_detokenization_file( "/{asset_id}/permissionless-retire", response_model=schemas.PermissionlessRetirementTxResponse, ) -@disallow([ExecutionMode.EXPLORER, ExecutionMode.REGISTRY]) +@disallow([ExecutionMode.EXPLORER, ExecutionMode.REGISTRY]) # type: ignore[misc] async def create_permissionless_retirement_tx( asset_id: str, request: schemas.PermissionlessRetirementTxRequest, diff --git a/app/api/v1/transactions.py b/app/api/v1/transactions.py index 30df6b7..38bfee0 100644 --- a/app/api/v1/transactions.py +++ b/app/api/v1/transactions.py @@ -29,7 +29,7 @@ "/{transaction_id}", response_model=schemas.Transaction, ) -@disallow([ExecutionMode.EXPLORER]) +@disallow([ExecutionMode.EXPLORER]) # type: ignore[misc] async def get_transaction( transaction_id: str, wallet_rpc_client: WalletRpcClient = Depends(deps.get_wallet_rpc_client), @@ -40,7 +40,6 @@ async def get_transaction( """ transaction_record: TransactionRecord = await wallet_rpc_client.get_transaction( - wallet_id=0, transaction_id=bytes32.from_hexstr(transaction_id), ) @@ -108,7 +107,7 @@ async def get_transactions( gateway_cat_puzzle: Program = construct_cat_puzzle( mod_code=CAT_MOD, limitations_program_hash=cat_info.limitations_program_hash, - inner_puzzle=gateway_puzzle, + inner_puzzle_or_hash=gateway_puzzle, ) gateway_cat_puzzle_hash: bytes32 = gateway_cat_puzzle.get_tree_hash() diff --git 
a/app/core/chialisp/tail.py b/app/core/chialisp/tail.py index ab66b5b..741defa 100644 --- a/app/core/chialisp/tail.py +++ b/app/core/chialisp/tail.py @@ -2,9 +2,9 @@ from typing import Optional -from chia_rs import G1Element from chia.types.blockchain_format.program import Program from chia.types.blockchain_format.sized_bytes import bytes32 +from chia_rs import G1Element from app.core.chialisp.load_clvm import load_clvm_locally from app.core.types import GatewayMode diff --git a/app/core/climate_wallet/wallet.py b/app/core/climate_wallet/wallet.py index 678e3bb..8a4e307 100644 --- a/app/core/climate_wallet/wallet.py +++ b/app/core/climate_wallet/wallet.py @@ -28,6 +28,7 @@ from chia.wallet.util.compute_memos import compute_memos from chia.wallet.util.tx_config import DEFAULT_TX_CONFIG from chia.wallet.util.wallet_types import WalletType +from chia.wallet.wallet_spend_bundle import WalletSpendBundle from chia_rs import AugSchemeMPL, G1Element, G2Element, PrivateKey from app.core.chialisp.gateway import create_gateway_puzzle, parse_gateway_spend @@ -135,7 +136,7 @@ def has_wallet_client(self) -> bool: return self.wallet_client is not None @property - def delegated_signatures(self) -> Dict[Tuple[bytes, bytes], G2Element]: + def delegated_signatures(self) -> Dict[Tuple[G1Element, bytes], G2Element]: return { (self.root_public_key, message): signature for ( @@ -188,7 +189,7 @@ async def _create_transaction( to_puzzle_hash: Optional[bytes32] = None, key_value_pairs: Optional[List[Tuple[Any, Any]]] = None, gateway_public_key: Optional[G1Element] = None, - public_key_to_secret_key: Optional[Dict[bytes, PrivateKey]] = None, + public_key_to_secret_key: Optional[Dict[G1Element, PrivateKey]] = None, allow_missing_signature: bool = False, wallet_id: int = 1, ) -> Dict[str, Any]: @@ -224,22 +225,22 @@ async def _create_transaction( aggregated_signature=signature, ) - response = await get_created_signed_transactions( + transactions = await get_created_signed_transactions( transaction_request=transaction_request, wallet_id=wallet_id, wallet_client=self.wallet_client, ) new_txs = [] - for tx in response.transactions: + for tx in transactions: if unsigned_gateway_coin_spend.coin in tx.additions: - spend_bundle = SpendBundle.aggregate( + spend_bundle = WalletSpendBundle.aggregate( [gateway_spend_bundle] + ([] if tx.spend_bundle is None else [tx.spend_bundle]) ) additions = [ add for add in tx.additions if add != unsigned_gateway_coin_spend.coin ] + gateway_spend_bundle.additions() else: - spend_bundle = tx.spend_bundle + spend_bundle = WalletSpendBundle.aggregate(([] if tx.spend_bundle is None else [tx.spend_bundle])) additions = tx.additions removals = [rem for rem in tx.removals if rem not in additions] new_tx = dataclasses.replace( @@ -303,17 +304,17 @@ async def _create_client_transaction( memos=[], ) ], - fee=0, + fee=uint64(0), ) - response = await get_created_signed_transactions( + transactions = await get_created_signed_transactions( transaction_request=transaction_request, wallet_id=wallet_id, wallet_client=self.wallet_client, ) - if len(response.transactions) != 1: - raise ValueError(f"Transaction record has unexpected length {len(response.transactions)}!") + if len(transactions) != 1: + raise ValueError(f"Transaction record has unexpected length {len(transactions)}!") - transaction_record = response.transactions[0] + transaction_record = transactions[0] if transaction_record.spend_bundle is None: raise ValueError("No spend bundle created!") coin_spend: CoinSpend = 
transaction_record.spend_bundle.coin_spends[0] @@ -461,8 +462,8 @@ async def parse_detokenization_request( if puzzle_args is None: continue - (_, asset_id, inner_puzzle) = puzzle_args - asset_id = asset_id.as_atom() + (_, asset_id_program, inner_puzzle) = puzzle_args + asset_id = asset_id_program.as_atom() inner_solution = solution.at("f") # check for gateway puzzle @@ -553,10 +554,10 @@ async def sign_and_send_detokenization_request( if aggregated_signature == G2Element(): raise ValueError("Invalid detokenization request!") - spend_bundle = SpendBundle.aggregate( + spend_bundle = WalletSpendBundle.aggregate( [ unsigned_spend_bundle, - SpendBundle(coin_spends=[], aggregated_signature=aggregated_signature), + WalletSpendBundle(coin_spends=[], aggregated_signature=aggregated_signature), ] ) if gateway_coin_spend is None: @@ -677,20 +678,20 @@ async def get_activities( delegated_solution: Program = tail_solution.at("r") key_value_pairs: Program = delegated_solution.at("f") - metadata: Dict[bytes, bytes] = {} + metadata: Dict[str, str] = {} for key_value_pair in key_value_pairs.as_iter(): if (not key_value_pair.listp()) or (key_value_pair.at("r").listp()): logger.warning(f"Coin {coin.name()} has incorrect metadata structure") continue - key = key_value_pair.at("f").as_atom() - value = key_value_pair.at("r").as_atom() + key_bytes = key_value_pair.at("f").as_atom() + value_bytes = key_value_pair.at("r").as_atom() - key = key.decode() + key = key_bytes.decode() if key in ["bp"]: - value = f"0x{value.hex()}" + value = f"0x{value_bytes.hex()}" elif key in ["ba", "bn"]: - value = value.decode() + value = value_bytes.decode() else: raise ValueError("Unknown key '{key}'!") diff --git a/app/core/climate_wallet/wallet_utils.py b/app/core/climate_wallet/wallet_utils.py index 8a3f0bd..e7db558 100644 --- a/app/core/climate_wallet/wallet_utils.py +++ b/app/core/climate_wallet/wallet_utils.py @@ -98,7 +98,7 @@ def create_gateway_request_and_spend( extra_delta: int = 0 conditions = [] - conditions.append([ConditionOpcode.CREATE_COIN, None, -113, tail_program, tail_solution]) + conditions.append(Program.to([ConditionOpcode.CREATE_COIN, None, -113, tail_program, tail_solution])) if to_puzzle_hash is None: if mode in [GatewayMode.TOKENIZATION]: @@ -107,7 +107,7 @@ def create_gateway_request_and_spend( extra_delta = -amount else: - conditions.append([ConditionOpcode.CREATE_COIN, to_puzzle_hash, amount, [to_puzzle_hash]]) + conditions.append(Program.to([ConditionOpcode.CREATE_COIN, to_puzzle_hash, amount, [to_puzzle_hash]])) conditions_program = Program.to(conditions) gateway_announcement = create_gateway_announcement( @@ -118,7 +118,7 @@ def create_gateway_request_and_spend( coins=coins, payments=[gateway_payment], coin_announcements=[gateway_announcement], - fee=fee, + fee=uint64(fee), ) gateway_solution: Program = create_gateway_solution( diff --git a/app/core/derive_keys.py b/app/core/derive_keys.py index 08949a2..f132b0c 100644 --- a/app/core/derive_keys.py +++ b/app/core/derive_keys.py @@ -1,7 +1,7 @@ from __future__ import annotations -from chia_rs import PrivateKey from chia.wallet.derive_keys import _derive_path_unhardened +from chia_rs import PrivateKey def master_sk_to_root_sk(master: PrivateKey) -> PrivateKey: diff --git a/app/core/types.py b/app/core/types.py index 6c04ddb..8dc60b1 100644 --- a/app/core/types.py +++ b/app/core/types.py @@ -7,8 +7,8 @@ from chia.types.blockchain_format.coin import Coin from chia.types.blockchain_format.program import Program from 
chia.types.blockchain_format.sized_bytes import bytes32 -from chia.types.condition_opcodes import ConditionOpcode -from chia.wallet.conditions import CreateCoinAnnouncement, CreatePuzzleAnnouncement +from chia.util.ints import uint64 +from chia.wallet.conditions import CreateCoinAnnouncement, CreatePuzzleAnnouncement, ReserveFee from chia.wallet.payment import Payment CLIMATE_WALLET_INDEX = 2 @@ -52,21 +52,21 @@ class TransactionRequest(object): payments: List[Payment] = dataclasses.field(default_factory=list) coin_announcements: List[CreateCoinAnnouncement] = dataclasses.field(default_factory=list) puzzle_announcements: List[CreatePuzzleAnnouncement] = dataclasses.field(default_factory=list) - fee: int = dataclasses.field(default=0) + fee: uint64 = dataclasses.field(default=uint64(0)) def to_program(self) -> Program: conditions = [] for payment in self.payments: - conditions.append([ConditionOpcode.CREATE_COIN] + payment.as_condition_args()) + conditions.append(payment.as_condition()) - for announcement in self.coin_announcements: - conditions.append(announcement.to_program()) + for coin_announcement in self.coin_announcements: + conditions.append(coin_announcement.to_program()) - for announcement in self.puzzle_announcements: - conditions.append(announcement.to_program()) + for puz_announcement in self.puzzle_announcements: + conditions.append(puz_announcement.to_program()) if self.fee: - conditions.append([ConditionOpcode.RESERVE_FEE, self.fee]) + conditions.append(ReserveFee(self.fee).to_program()) ret: Program = Program.to(conditions) return ret diff --git a/app/core/utils.py b/app/core/utils.py index 61b1a90..191d537 100644 --- a/app/core/utils.py +++ b/app/core/utils.py @@ -4,6 +4,7 @@ from chia.consensus.constants import ConsensusConstants, replace_str_to_bytes from chia.consensus.default_constants import DEFAULT_CONSTANTS +from chia.rpc.wallet_request_types import GetPrivateKey from chia.rpc.wallet_rpc_client import WalletRpcClient from chia.types.blockchain_format.sized_bytes import bytes32 from chia.util.config import load_config @@ -42,10 +43,11 @@ async def get_constants( async def get_climate_secret_key( wallet_client: WalletRpcClient, ) -> PrivateKey: - fingerprint: int = await wallet_client.get_logged_in_fingerprint() - result = await wallet_client.get_private_key(fingerprint=fingerprint) + fingerprint = await wallet_client.get_logged_in_fingerprint() + assert fingerprint.fingerprint is not None + result = await wallet_client.get_private_key(GetPrivateKey(fingerprint=fingerprint.fingerprint)) - master_secret_key: PrivateKey = PrivateKey.from_bytes(bytes.fromhex(result["sk"])) + master_secret_key = result.private_key.sk root_secret_key: PrivateKey = master_sk_to_root_sk(master_secret_key) return root_secret_key @@ -91,9 +93,11 @@ async def get_wallet_info_by_id( async def get_first_puzzle_hash( wallet_client: WalletRpcClient, ) -> bytes32: - fingerprint: int = await wallet_client.get_logged_in_fingerprint() - result = await wallet_client.get_private_key(fingerprint=fingerprint) - master_secret_key: PrivateKey = PrivateKey.from_bytes(bytes.fromhex(result["sk"])) + fingerprint = await wallet_client.get_logged_in_fingerprint() + assert fingerprint.fingerprint is not None + + result = await wallet_client.get_private_key(GetPrivateKey(fingerprint=fingerprint.fingerprint)) + master_secret_key = result.private_key.sk wallet_secret_key: PrivateKey = master_sk_to_wallet_sk_unhardened(master_secret_key, uint32(0)) wallet_public_key: G1Element = wallet_secret_key.get_g1() @@ -109,10 
+113,7 @@ async def get_created_signed_transactions( wallet_id: int, wallet_client: WalletRpcClient, ) -> List[TransactionRecord]: - if transaction_request.coins is None: - transaction_request.coins = [] # type: ignore - - transaction_records = await wallet_client.create_signed_transactions( + response = await wallet_client.create_signed_transactions( coins=transaction_request.coins, additions=transaction_request.additions, fee=uint64(transaction_request.fee), @@ -121,4 +122,4 @@ async def get_created_signed_transactions( extra_conditions=(*transaction_request.coin_announcements, *transaction_request.puzzle_announcements), ) - return transaction_records + return response.transactions diff --git a/app/crud/chia.py b/app/crud/chia.py index b8dbfce..590da0e 100644 --- a/app/crud/chia.py +++ b/app/crud/chia.py @@ -6,10 +6,10 @@ from urllib.parse import urlencode, urlparse import requests -from chia_rs import G1Element from chia.rpc.full_node_rpc_client import FullNodeRpcClient from chia.types.blockchain_format.coin import Coin from chia.types.coin_record import CoinRecord +from chia_rs import G1Element from fastapi.encoders import jsonable_encoder from app import schemas @@ -46,7 +46,7 @@ def _get_paginated_data(self, path: str, search_params: Dict[str, Any]) -> List[ Returns: A list of all data retrieved from the paginated API. """ - all_data = [] + all_data: list[dict[str, Any]] = [] page = 1 limit = 10 @@ -68,16 +68,19 @@ def _get_paginated_data(self, path: str, search_params: Dict[str, Any]) -> List[ data = response.json() if data is None: # some cadt endpoints return null with no pagination info if no data is found - # to prevent an infinite loop need to assume that there is no data matching the search from this iteration on + # to prevent an infinite loop need to assume that there is no data matching + # the search from this iteration on return all_data try: - if data["page"] and (data["pageCount"] >= 0) and len(data["data"]) >= 0: # page count can be 0 (as of when this was written) - all_data.extend(data["data"]) # Add data from the current page + if ( + data["page"] and (data["pageCount"] >= 0) and len(data["data"]) >= 0 + ): # page count can be 0 (as of when this was written) + all_data.extend(data["data"]) # Add data from the current page else: - all_data.append(data) # data was not paginated, append and return + all_data.append(data) # data was not paginated, append and return return all_data - except: + except Exception: all_data.append(data) # data was not paginated, append and return return all_data @@ -192,7 +195,7 @@ def combine_climate_units_and_metadata(self, search: Dict[str, Any]) -> List[Dic warehouse_project_id = unit["issuance"]["warehouseProjectId"] project = project_by_id[warehouse_project_id] except (KeyError, TypeError): - logger.warning(f"Can not get project by warehouse_project_id") + logger.warning("Can not get project by warehouse_project_id") continue org_metadata = metadata_by_id.get(unit_org_uid) diff --git a/app/crud/db.py b/app/crud/db.py index f10466a..af7f837 100644 --- a/app/crud/db.py +++ b/app/crud/db.py @@ -63,7 +63,7 @@ def select_first_db(self, model: Any, order_by: Any) -> Any: raise errorcode.internal_server_error(message="Select DB Failure") def select_activity_with_pagination( - self, model: Any, filters: Any, order_by: Any, limit: int = None, page: int = None + self, model: Any, filters: Any, order_by: Any, limit: Optional[int] = None, page: Optional[int] = None ) -> Tuple[Any, int]: try: query = self.db.query(model).filter(or_(*filters["or"]), 
and_(*filters["and"])) diff --git a/app/schemas/__init__.py b/app/schemas/__init__.py index 90e48ea..e913ce4 100644 --- a/app/schemas/__init__.py +++ b/app/schemas/__init__.py @@ -4,9 +4,9 @@ ActivitiesResponse, Activity, ActivityBase, + ActivityRecordResponse, ActivitySearchBy, ActivityWithCW, - ActivityRecordResponse ) from app.schemas.key import Key # noqa: F401 from app.schemas.metadata import ( # noqa: F401 diff --git a/app/schemas/activity.py b/app/schemas/activity.py index 25776df..b45d480 100644 --- a/app/schemas/activity.py +++ b/app/schemas/activity.py @@ -1,7 +1,6 @@ from __future__ import annotations import enum -from email.policy import default from typing import Any, Dict, List, Optional, Union from pydantic import Field, validator @@ -60,5 +59,6 @@ class ActivitiesResponse(BaseModel): activities: List[ActivityWithCW] = Field(default_factory=list) total: int = 0 + class ActivityRecordResponse(BaseModel): - activity: ActivityWithCW = Field(default=None) \ No newline at end of file + activity: Optional[ActivityWithCW] = Field(default=None) diff --git a/app/utils.py b/app/utils.py index 2218da7..2a5635d 100644 --- a/app/utils.py +++ b/app/utils.py @@ -35,17 +35,17 @@ # return decorator -def disallow(modes: List[ExecutionMode]): - def _disallow(f: Callable): +def disallow(modes: List[ExecutionMode]): # type: ignore[no-untyped-def] + def _disallow(f: Callable): # type: ignore[no-untyped-def, type-arg] if settings.MODE in modes: - async def _f(*args, **kargs): + async def _f(*args, **kargs): # type: ignore[no-untyped-def] return status.HTTP_405_METHOD_NOT_ALLOWED else: @functools.wraps(f) - async def _f(*args, **kargs): + async def _f(*args, **kargs): # type: ignore[no-untyped-def] return await f(*args, **kargs) return _f diff --git a/config.yaml b/config.yaml index 22e2ddc..d006411 100644 --- a/config.yaml +++ b/config.yaml @@ -9,7 +9,7 @@ CADT_API_KEY: CHIA_HOSTNAME: localhost CHIA_FULL_NODE_RPC_PORT: 8555 CHIA_WALLET_RPC_PORT: 9256 -DEFAULT_FEE: 1000000000 # mojos +DEFAULT_FEE: 1000000000 # mojos CLIMATE_TOKEN_REGISTRY_PORT: 31312 CLIMATE_EXPLORER_PORT: 31313 CLIMATE_TOKEN_CLIENT_PORT: 31314 diff --git a/poetry.lock b/poetry.lock index affc7bf..9d8b8ac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -551,6 +551,17 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + [[package]] name = "charset-normalizer" version = "2.1.1" @@ -1010,6 +1021,17 @@ files = [ {file = "decli-0.5.2.tar.gz", hash = "sha256:f2cde55034a75c819c630c7655a844c612f2598c42c21299160465df6ad463ad"}, ] +[[package]] +name = "distlib" +version = "0.3.9" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] + [[package]] name = "dnslib" version = "0.9.25" @@ -1371,6 +1393,20 @@ http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] zstd = ["zstandard (>=0.18.0)"] +[[package]] +name = "identify" +version = "2.6.1" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, + {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, +] + +[package.extras] +license = ["ukkonen"] + [[package]] name = "idna" version = "3.8" @@ -1482,21 +1518,25 @@ test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-c [[package]] name = "jaraco-functools" -version = "4.0.2" +version = "4.1.0" description = "Functools like those found in stdlib" optional = false python-versions = ">=3.8" files = [ - {file = "jaraco.functools-4.0.2-py3-none-any.whl", hash = "sha256:c9d16a3ed4ccb5a889ad8e0b7a343401ee5b2a71cee6ed192d3f68bc351e94e3"}, - {file = "jaraco_functools-4.0.2.tar.gz", hash = "sha256:3460c74cd0d32bf82b9576bbb3527c4364d5b27a21f5158a62aed6c4b42e23f5"}, + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, ] [package.dependencies] more-itertools = "*" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["jaraco.classes", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] [[package]] name = "jeepney" @@ -1758,48 +1798,49 @@ files = [ [[package]] name = "mypy" -version = "1.4.1" +version = "1.11.1" description = "Optional static typing for Python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "mypy-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:566e72b0cd6598503e48ea610e0052d1b8168e60a46e0bfd34b3acf2d57f96a8"}, - {file = "mypy-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ca637024ca67ab24a7fd6f65d280572c3794665eaf5edcc7e90a866544076878"}, - {file = "mypy-1.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0dde1d180cd84f0624c5dcaaa89c89775550a675aff96b5848de78fb11adabcd"}, - {file = "mypy-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8c4d8e89aa7de683e2056a581ce63c46a0c41e31bd2b6d34144e2c80f5ea53dc"}, - {file = "mypy-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:bfdca17c36ae01a21274a3c387a63aa1aafe72bff976522886869ef131b937f1"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7549fbf655e5825d787bbc9ecf6028731973f78088fbca3a1f4145c39ef09462"}, - {file = "mypy-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98324ec3ecf12296e6422939e54763faedbfcc502ea4a4c38502082711867258"}, - {file = "mypy-1.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:141dedfdbfe8a04142881ff30ce6e6653c9685b354876b12e4fe6c78598b45e2"}, - {file = "mypy-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8207b7105829eca6f3d774f64a904190bb2231de91b8b186d21ffd98005f14a7"}, - {file = "mypy-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:16f0db5b641ba159eff72cff08edc3875f2b62b2fa2bc24f68c1e7a4e8232d01"}, - {file = "mypy-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:470c969bb3f9a9efcedbadcd19a74ffb34a25f8e6b0e02dae7c0e71f8372f97b"}, - {file = "mypy-1.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5952d2d18b79f7dc25e62e014fe5a23eb1a3d2bc66318df8988a01b1a037c5b"}, - {file = "mypy-1.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:190b6bab0302cec4e9e6767d3eb66085aef2a1cc98fe04936d8a42ed2ba77bb7"}, - {file = "mypy-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9d40652cc4fe33871ad3338581dca3297ff5f2213d0df345bcfbde5162abf0c9"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01fd2e9f85622d981fd9063bfaef1aed6e336eaacca00892cd2d82801ab7c042"}, - {file = "mypy-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2460a58faeea905aeb1b9b36f5065f2dc9a9c6e4c992a6499a2360c6c74ceca3"}, - {file = "mypy-1.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2746d69a8196698146a3dbe29104f9eb6a2a4d8a27878d92169a6c0b74435b6"}, - {file = "mypy-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae704dcfaa180ff7c4cfbad23e74321a2b774f92ca77fd94ce1049175a21c97f"}, - {file = "mypy-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:43d24f6437925ce50139a310a64b2ab048cb2d3694c84c71c3f2a1626d8101dc"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c482e1246726616088532b5e964e39765b6d1520791348e6c9dc3af25b233828"}, - {file = "mypy-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:43b592511672017f5b1a483527fd2684347fdffc041c9ef53428c8dc530f79a3"}, - {file = "mypy-1.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34a9239d5b3502c17f07fd7c0b2ae6b7dd7d7f6af35fbb5072c6208e76295816"}, - {file = "mypy-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5703097c4936bbb9e9bce41478c8d08edd2865e177dc4c52be759f81ee4dd26c"}, - {file = "mypy-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e02d700ec8d9b1859790c0475df4e4092c7bf3272a4fd2c9f33d87fac4427b8f"}, - {file = "mypy-1.4.1-py3-none-any.whl", hash = "sha256:45d32cec14e7b97af848bddd97d85ea4f0db4d5a149ed9676caa4eb2f7402bb4"}, - {file = "mypy-1.4.1.tar.gz", hash = "sha256:9bbcd9ab8ea1f2e1c8031c21445b511442cc45c89951e49bbf852cbb70755b1b"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, + {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, + {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, + {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, + {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, + {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, + {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, + {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, + {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, + {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, + {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, + {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, + {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, + {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, + {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, + {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, + {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, + {file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, + {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, + {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, + {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, + {file = 
"mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, + {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" +typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] -python2 = ["typed-ast (>=1.4.0,<2)"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] @@ -1813,6 +1854,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] + [[package]] name = "packaging" version = "24.0" @@ -1896,6 +1948,24 @@ docs = ["sphinx (>=1.7.1)"] redis = ["redis"] tests = ["pytest (>=5.4.1)", "pytest-cov (>=2.8.1)", "pytest-mypy (>=0.8.0)", "pytest-timeout (>=2.1.0)", "redis", "sphinx (>=6.0.0)", "types-redis"] +[[package]] +name = "pre-commit" +version = "3.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pre_commit-3.7.1-py2.py3-none-any.whl", hash = "sha256:fae36fd1d7ad7d6a5a1c0b0d5adb2ed1a3bda5a21bf6c3e5372073d7a11cd4c5"}, + {file = "pre_commit-3.7.1.tar.gz", hash = "sha256:8ca3ad567bc78a4972a3f1a477e94a79d4597e8140a6e0b651c5e33899c3654a"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + [[package]] name = "prompt-toolkit" version = "3.0.47" @@ -2099,6 +2169,20 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "pyupgrade" +version = "3.16.0" +description = "A tool to automatically upgrade syntax for newer versions." +optional = false +python-versions = ">=3.8.1" +files = [ + {file = "pyupgrade-3.16.0-py2.py3-none-any.whl", hash = "sha256:7a54ee28f3024d027048d49d101e5c702e88c85edc3a1d08b636c50ebef2a97d"}, + {file = "pyupgrade-3.16.0.tar.gz", hash = "sha256:237893a05d5b117259b31b423f23cbae4bce0b7eae57ba9a52c06098c2ddd76f"}, +] + +[package.dependencies] +tokenize-rt = ">=5.2.0" + [[package]] name = "pywin32" version = "306" @@ -2563,6 +2647,17 @@ files = [ [package.extras] tests = ["pytest", "pytest-cov"] +[[package]] +name = "tokenize-rt" +version = "6.1.0" +description = "A wrapper around the stdlib `tokenize` which roundtrips." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "tokenize_rt-6.1.0-py2.py3-none-any.whl", hash = "sha256:d706141cdec4aa5f358945abe36b911b8cbdc844545da99e811250c0cee9b6fc"}, + {file = "tokenize_rt-6.1.0.tar.gz", hash = "sha256:e8ee836616c0877ab7c7b54776d2fefcc3bde714449a206762425ae114b53c86"}, +] + [[package]] name = "tomli" version = "2.0.1" @@ -2585,6 +2680,31 @@ files = [ {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] +[[package]] +name = "types-pyyaml" +version = "6.0.12.20240917" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, + {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.11.0" @@ -2646,6 +2766,26 @@ h11 = ">=0.8" [package.extras] standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.0)"] +[[package]] +name = "virtualenv" +version = "20.27.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, + {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "watchdog" version = "4.0.1" @@ -2924,4 +3064,4 @@ files = [ [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.13" -content-hash = "461ddac451ab57dfa69cde450b47fda38b3832c9d556dd5eca1892e7c4a22679" +content-hash = "0b844b35020e06c90f5541d0914086396918162ddc0c46ba19d266556c94c182" diff --git a/pyproject.toml b/pyproject.toml index 8b0f912..e811986 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,6 @@ packages = [{ include = "app" }] [tool.poetry.dependencies] python = ">=3.10,<3.13" -#chia-blockchain = { path = "./chia-blockchain", develop = true } fastapi = "^0.83.0" uvicorn = "^0.18.3" SQLAlchemy = "^1.4.41" @@ -25,6 +24,8 @@ pydantic = {version = "^1.10.2", extras = ["dotenv"]} chia-blockchain = "2.4.4" httpx = 
"^0.27.2" typing-inspect = "^0.9.0" +types-pyyaml = "^6.0.12.20240917" +types-requests = "^2.32.0.20241016" [tool.poetry.group.dev] optional = true @@ -32,13 +33,16 @@ optional = true [tool.poetry.group.dev.dependencies] # has to be disabled unfortunately due to developing chia-blockchain # chia-dev-tools = "^1.1.1" +# chia-blockchain = { path = "./chia-blockchain", develop = true } pytest = "^7.1.2" commitizen = "^2.27.1" pytest-xdist = "^3.3.1" black = "23.7.0" isort = "5.12.0" flake8 = "6.1.0" -mypy = "1.4.1" +mypy = "1.11.1" +pre-commit = "3.7.1" +pyupgrade = "3.16.0" [tool.commitizen] name = "cz_conventional_commits" diff --git a/tests/test_cat_lifecycle.py b/tests/test_cat_lifecycle.py index 0b60de3..5ac36c2 100644 --- a/tests/test_cat_lifecycle.py +++ b/tests/test_cat_lifecycle.py @@ -59,7 +59,7 @@ async def test_cat_lifecycle( melt_secret_key: PrivateKey = AugSchemeMPL.key_gen(secrets.token_bytes(64)) melt_public_key: G1Element = melt_secret_key.get_g1() - public_key_to_secret_key: Dict[bytes, PrivateKey] = { + public_key_to_secret_key: Dict[G1Element, PrivateKey] = { root_public_key: root_secret_key, mint_public_key: mint_secret_key, melt_public_key: melt_secret_key, @@ -67,7 +67,7 @@ async def test_cat_lifecycle( tail_program: Program = create_tail_program( public_key=root_public_key, - index=Program.to(["registry", "project", "vintage"]).get_tree_hash(), + index=Program.to(Program.to(["registry", "project", "vintage"]).get_tree_hash()), ) tail_program_hash: bytes32 = tail_program.get_tree_hash() diff --git a/tests/test_cat_workflow.py b/tests/test_cat_workflow.py index ec9d526..01dbe95 100644 --- a/tests/test_cat_workflow.py +++ b/tests/test_cat_workflow.py @@ -57,6 +57,7 @@ async def test_cat_tokenization_workflow( wallet_2: Wallet = env_2.xch_wallet fingerprint = (await wallet_client_1.get_logged_in_fingerprint()).fingerprint + assert fingerprint is not None result = await wallet_client_1.get_private_key(GetPrivateKey(fingerprint=fingerprint)) root_secret_key: PrivateKey = master_sk_to_root_sk(result.private_key.sk) @@ -71,7 +72,7 @@ async def test_cat_tokenization_workflow( root_secret_key=root_secret_key, wallet_client=wallet_client_1, ) - result = await climate_wallet_1.send_tokenization_transaction( + await climate_wallet_1.send_tokenization_transaction( to_puzzle_hash=await wallet_2.get_new_puzzlehash(), amount=amount, fee=fee, @@ -151,6 +152,7 @@ async def test_cat_detokenization_workflow( wallet_2: Wallet = env_2.xch_wallet fingerprint = (await wallet_client_1.get_logged_in_fingerprint()).fingerprint + assert fingerprint is not None result = await wallet_client_1.get_private_key(GetPrivateKey(fingerprint=fingerprint)) root_secret_key: PrivateKey = master_sk_to_root_sk(result.private_key.sk) @@ -163,7 +165,8 @@ async def test_cat_detokenization_workflow( root_secret_key=root_secret_key, wallet_client=wallet_client_1, ) - result = await climate_wallet_1.send_tokenization_transaction( + + await climate_wallet_1.send_tokenization_transaction( to_puzzle_hash=await wallet_2.get_new_puzzlehash(), amount=amount, fee=fee, @@ -220,22 +223,22 @@ async def test_cat_detokenization_workflow( wallet_client=wallet_client_2, constants=climate_wallet_1.constants, ) - result = await climate_wallet_2.create_detokenization_request( + detok_result = await climate_wallet_2.create_detokenization_request( amount=amount, fee=fee, wallet_id=env_2.wallet_aliases["cat"], ) - content: str = result["content"] + content: str = detok_result["content"] - result = await 
ClimateWallet.parse_detokenization_request( + detok_result = await ClimateWallet.parse_detokenization_request( content=content, ) - assert result["mode"] == GatewayMode.DETOKENIZATION - assert result["amount"] == amount - assert result["fee"] == fee - assert result["asset_id"] == climate_wallet_1.tail_program_hash + assert detok_result["mode"] == GatewayMode.DETOKENIZATION + assert detok_result["amount"] == amount + assert detok_result["fee"] == fee + assert detok_result["asset_id"] == climate_wallet_1.tail_program_hash - result = await climate_wallet_1.sign_and_send_detokenization_request( + detok_result = await climate_wallet_1.sign_and_send_detokenization_request( content=content, ) @@ -327,6 +330,7 @@ async def test_cat_permissionless_retirement_workflow( wallet_2: Wallet = env_2.xch_wallet fingerprint = (await wallet_client_1.get_logged_in_fingerprint()).fingerprint + assert fingerprint is not None result = await wallet_client_1.get_private_key(GetPrivateKey(fingerprint=fingerprint)) root_secret_key: PrivateKey = master_sk_to_root_sk(result.private_key.sk) @@ -335,7 +339,7 @@ async def test_cat_permissionless_retirement_workflow( root_secret_key=root_secret_key, wallet_client=wallet_client_1, ) - result = await climate_wallet_1.send_tokenization_transaction( + await climate_wallet_1.send_tokenization_transaction( to_puzzle_hash=await wallet_2.get_new_puzzlehash(), amount=amount, fee=fee, @@ -389,7 +393,7 @@ async def test_cat_permissionless_retirement_workflow( ) test_address = "This is a fake address".encode() - result = await climate_wallet_2.send_permissionless_retirement_transaction( + await climate_wallet_2.send_permissionless_retirement_transaction( amount=amount, fee=fee, beneficiary_name=beneficiary_name, diff --git a/tests/test_disallow.py b/tests/test_disallow.py index 39ef9b4..27cdf94 100644 --- a/tests/test_disallow.py +++ b/tests/test_disallow.py @@ -11,11 +11,11 @@ async def test_disallow() -> None: settings.MODE = ExecutionMode.DEV - @disallow([ExecutionMode.DEV]) + @disallow([ExecutionMode.DEV]) # type: ignore[misc] async def disallow_dev() -> int: return 5 - @disallow([ExecutionMode.REGISTRY]) + @disallow([ExecutionMode.REGISTRY]) # type: ignore[misc] async def allow_dev() -> int: return 5
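Most of the `# type: ignore[misc]` comments added across the routers and tests above exist because `disallow` in `app/utils.py` is an untyped decorator, so mypy treats every endpoint it wraps as untyped. A possible follow-up (a sketch only, not part of this patch) would be to type the decorator with `ParamSpec`, which the project's Python >=3.10 floor already allows; `typed_disallow` is a hypothetical name.

```python
import functools
from typing import Awaitable, Callable, List, ParamSpec, TypeVar, Union

from fastapi import status

from app.config import ExecutionMode, settings

P = ParamSpec("P")
R = TypeVar("R")


def typed_disallow(
    modes: List[ExecutionMode],
) -> Callable[[Callable[P, Awaitable[R]]], Callable[P, Awaitable[Union[R, int]]]]:
    def _disallow(f: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[Union[R, int]]]:
        if settings.MODE in modes:
            # Disallowed mode: short-circuit with 405, as the current decorator does.
            async def _f(*args: P.args, **kwargs: P.kwargs) -> Union[R, int]:
                return status.HTTP_405_METHOD_NOT_ALLOWED

        else:
            # Allowed mode: forward to the wrapped coroutine unchanged.
            @functools.wraps(f)
            async def _f(*args: P.args, **kwargs: P.kwargs) -> Union[R, int]:
                return await f(*args, **kwargs)

        return _f

    return _disallow
```

With a typed decorator along these lines, decorated endpoints keep their signatures under mypy and the per-call-site ignores could eventually be dropped.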