diff --git a/.github/workflows/ci-test.yml b/.github/workflows/ci-test.yml
index 5a669b53fd..7b433f811b 100644
--- a/.github/workflows/ci-test.yml
+++ b/.github/workflows/ci-test.yml
@@ -10,6 +10,12 @@ permissions:
   contents: read
   packages: write
 
+env:
+  AWS_REGION: eu-central-1
+  AWS_ROLE_ARN: arn:aws:iam::332405224602:role/ci
+  ECR_REGISTRY: 332405224602.dkr.ecr.eu-central-1.amazonaws.com
+  TAG: ${{ github.sha }}
+
 jobs:
   build:
     name: CI Test
@@ -17,21 +23,21 @@ jobs:
     env:
       FORCE_COLOR: 1
     steps:
-      - name: Install Earthly
-        uses: earthly/actions/setup-earthly@v1
-        with:
-          version: v0.7.0
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
         with:
          fetch-depth: 0
-      - name: Login to GitHub Container Registry
+      - name: Setup CI
+        uses: input-output-hk/catalyst-ci/actions/setup@master
+        with:
+          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
+          aws_region: ${{ env.AWS_REGION }}
+          earthly_runner_secret: ${{ secrets.EARTHLY_RUNNER_SECRET }}
+      - name: Login to ECR
         uses: docker/login-action@v2
         with:
-          registry: ghcr.io
-          username: ${{ github.actor }}
-          password: ${{ secrets.GITHUB_TOKEN }}
+          registry: ${{ env.ECR_REGISTRY }}
       - name: Run tests
         env:
           EARTHLY_SECRETS: "IDEASCALE_EMAIL=${{ secrets.IDEASCALE_EMAIL }}, IDEASCALE_PASSWORD=${{ secrets.IDEASCALE_PASSWORD }}, IDEASCALE_API_TOKEN=${{ secrets.IDEASCALE_API_TOKEN }}"
         run: |
-          earthly -P --remote-cache=ghcr.io/${{ github.repository }}:cache +test
+          earthly -P --buildkit-host "tcp://${{ secrets.EARTHLY_SATELLITE_ADDRESS }}:8372" +test-all
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index eb47581226..0256a8299f 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -2,110 +2,27 @@ name: CI
 
 on:
   push:
-    branches:
-      - main
-    paths:
-      - ".github/workflows/ci.yml"
-      - "containers/**"
-      - "services/**"
-      - "src/**"
-      - "utilities/**"
 
 permissions:
   id-token: write
-  contents: read
+  contents: write
   packages: write
 
-env:
-  AWS_REGION: eu-central-1
-  AWS_ROLE_ARN: arn:aws:iam::332405224602:role/ci
-  EARTHLY_TARGET: docker
-  EARTHLY_VERSION: 0.7.6
-  ECR_REGISTRY: 332405224602.dkr.ecr.eu-central-1.amazonaws.com
-  TAG: ${{ github.sha }}
-
 jobs:
-  discover:
-    runs-on: ubuntu-latest
-    outputs:
-      json: ${{ steps.discover.outputs.json}}
-      images: ${{ steps.discover.outputs.images}}
-    steps:
-      - uses: actions/checkout@v3
-      - name: Setup CI
-        uses: input-output-hk/catalyst-ci/actions/setup@master
-        with:
-          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
-          aws_region: ${{ env.AWS_REGION }}
-          earthly_version: ${{ env.EARTHLY_VERSION }}
-      - name: Discover Earthfiles
-        uses: input-output-hk/catalyst-ci/actions/discover@master
-        id: discover
-        with:
-          parse_images: "true"
-          targets: ${{ env.EARTHLY_TARGET }}
-  cache:
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v3
-      - name: Setup CI
-        uses: input-output-hk/catalyst-ci/actions/setup@master
-        with:
-          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
-          aws_region: ${{ env.AWS_REGION }}
-          earthly_version: ${{ env.EARTHLY_VERSION }}
-      - name: Build cache
-        uses: input-output-hk/catalyst-ci/actions/build@master
-        with:
-          earthfile: .
-          earthly_satellite: ${{ secrets.EARTHLY_SATELLITE_ADDRESS }}
-          target: builder
-  build:
-    runs-on: ubuntu-latest
-    needs: [cache, discover]
-    strategy:
-      fail-fast: false
-      matrix:
-        earthfile: ${{ fromJson(needs.discover.outputs.json) }}
-    steps:
-      - uses: actions/checkout@v3
-      - name: Setup CI
-        uses: input-output-hk/catalyst-ci/actions/setup@master
-        with:
-          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
-          aws_region: ${{ env.AWS_REGION }}
-          earthly_version: ${{ env.EARTHLY_VERSION }}
-      - name: Login to ECR
-        uses: docker/login-action@v2
-        with:
-          registry: ${{ env.ECR_REGISTRY }}
-      - name: Build and publish
-        uses: input-output-hk/catalyst-ci/actions/build@master
-        with:
-          earthfile: ${{ matrix.earthfile.path }}
-          earthly_satellite: ${{ secrets.EARTHLY_SATELLITE_ADDRESS }}
-          images: ${{ matrix.earthfile.images }}
-          publish: "true"
-          registry: ${{ env.ECR_REGISTRY }}
-          tags: "${{ env.TAG }}"
-          target: ${{ env.EARTHLY_TARGET }}
-  deploy:
-    runs-on: ubuntu-latest
-    needs: [discover, build]
-    steps:
-      - name: Setup CI
-        uses: input-output-hk/catalyst-ci/actions/setup@master
-        id: setup
-        with:
-          aws_role_arn: ${{ env.AWS_ROLE_ARN }}
-          aws_region: ${{ env.AWS_REGION }}
-          earthly_version: ${{ env.EARTHLY_VERSION }}
-      - name: Deploy
-        uses: input-output-hk/catalyst-ci/actions/deploy@master
-        with:
-          deployment_repo: input-output-hk/catalyst-world
-          # NOTE: For new services being deployed, this list must be updated
-          images: cat-data-service fragment-exporter migrations voting-node
-          environment: dev
-          tag: ${{ env.TAG }}
-          token: ${{ steps.setup.outputs.token }}
+  ci:
+    uses: input-output-hk/catalyst-ci/.github/workflows/ci.yml@master
+    with:
+      aws_ecr_registry: 332405224602.dkr.ecr.eu-central-1.amazonaws.com
+      aws_role_arn: arn:aws:iam::332405224602:role/ci
+      aws_region: eu-central-1
+      deployment_images: |
+        cat-data-service
+        fragment-exporter
+        migrations
+        voting-node
+    secrets:
+      deployment_token: ${{ secrets.CI_BOT_TOKEN }}
+      dockerhub_token: ${{ secrets.DOCKERHUB_TOKEN }}
+      dockerhub_username: ${{ secrets.DOCKERHUB_USERNAME }}
+      earthly_runner_address: ${{ secrets.EARTHLY_SATELLITE_ADDRESS }}
+      earthly_runner_secret: ${{ secrets.EARTHLY_RUNNER_SECRET }}
\ No newline at end of file
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index a3e89aff38..c0170711d5 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -127,7 +127,7 @@ jobs:
 
         # We are excluding cat-dat-service and event-db because we are already running it with Earthly
       - name: Build and archive tests
-        run: |
+        run: |
          cargo nextest archive \
            --workspace \
            --exclude vitup \
@@ -138,6 +138,10 @@
            --exclude vit-servicing-station-server \
            --exclude vit-servicing-station-tests \
            --exclude vit-servicing-station-lib \
+            --exclude vit-servicing-station-cli-f10 \
+            --exclude vit-servicing-station-server-f10 \
+            --exclude vit-servicing-station-tests-f10 \
+            --exclude vit-servicing-station-lib-f10 \
            --exclude cat-data-service \
            --exclude event-db \
            --exclude wallet-uniffi \
@@ -211,6 +215,8 @@ jobs:
          path: |
            target/debug/vit-servicing-station-cli
            target/debug/vit-servicing-station-server
+            target/debug/vit-servicing-station-cli-f10
+            target/debug/vit-servicing-station-server-f10
            target/debug/jcli
            target/debug/jormungandr
            target/debug/explorer
@@ -221,7 +227,7 @@
 
       - name: Install cargo-make
        run: cargo install --force cargo-make
-
+
      - name: Install refinery
        run: cargo install refinery_cli
 
@@ -232,7 +238,7 @@
      - name: Build external dependencies
        if: steps.deps-cache.outputs.cache-hit != 'true'
        run:
-          cargo build -p vit-servicing-station-cli -p vit-servicing-station-server -p jcli -p jormungandr -p explorer
+          cargo build -p vit-servicing-station-cli -p vit-servicing-station-server -p vit-servicing-station-cli-f10 -p vit-servicing-station-server-f10 -p jcli -p jormungandr -p explorer
 
      - name: Setup Event DB
        env:
@@ -242,7 +248,7 @@
        # We are excluding cat-dat-service and event-db because we are already running it with Earthly
      - name: Build and archive tests
        if: steps.archive-cache.outputs.cache-hit != 'true'
-        run: |
+        run: |
          cargo nextest archive \
            --workspace \
            --exclude vitup \
@@ -253,6 +259,10 @@
            --exclude vit-servicing-station-server \
            --exclude vit-servicing-station-tests \
            --exclude vit-servicing-station-lib \
+            --exclude vit-servicing-station-cli-f10 \
+            --exclude vit-servicing-station-server-f10 \
+            --exclude vit-servicing-station-tests-f10 \
+            --exclude vit-servicing-station-lib-f10 \
            --exclude cat-data-service \
            --exclude event-db \
            --exclude wallet-uniffi \
@@ -388,6 +398,6 @@
        with:
          toolchain: ${{env.RUST_LATEST_STABLE_VERSION}}
          components: rustfmt, clippy
-
+
      - run: rustup component add clippy
      - run: scripts/check-fmt.sh
diff --git a/.gitignore b/.gitignore
index b3eb4c17b7..6a888d4651 100644
--- a/.gitignore
+++ b/.gitignore
@@ -107,6 +107,7 @@ result*
 .vscode
 **/.idea/
 .temp/
+tests/tmp/
 
 # std
 .std
diff --git a/Cargo.lock b/Cargo.lock
index e61ad3cd2c..c82fc4a8b3 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -126,7 +126,7 @@ version = "0.7.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd"
 dependencies = [
- "getrandom 0.2.11",
+ "getrandom 0.2.10",
 "once_cell",
 "version_check",
 ]
@@ -145,9 +145,9 @@ dependencies = [
 
 [[package]]
 name = "aho-corasick"
-version = "1.1.2"
+version = "1.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0"
+checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab"
 dependencies = [
 "memchr",
 ]
@@ -459,18 +459,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.39",
+ "syn 2.0.37",
 ]
 
 [[package]]
 name = "async-trait"
-version = "0.1.74"
+version = "0.1.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9"
+checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0"
 dependencies = [
 "proc-macro2",
 "quote",
- "syn 2.0.39",
+ "syn 2.0.37",
 ]
 
 [[package]]
@@ -497,7 +497,7 @@ dependencies = [
 name = "audit"
 version = "0.1.0"
 dependencies = [
- "base64 0.21.5",
+ "base64 0.21.4",
 "bech32 0.8.1",
 "chain-addr",
 "chain-core",
@@ -507,7 +507,7 @@ dependencies = [
 "chain-storage",
 "chain-time",
 "chain-vote",
- "clap 4.4.8",
+ "clap 4.4.6",
 "clap_complete_command",
 "color-eyre",
 "criterion",
@@ -603,6 +603,12 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d27c3610c36aee21ce8ac510e6224498de4228ad772a171ed65643a24693a5a8"
 
+[[package]]
+name = "base64"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3441f0f7b02788e948e47f457ca01f1d7e6d92c693bc132c22b087d3141c03ff"
+
 [[package]]
 name = "base64"
 version = "0.13.1"
@@ -611,9 +617,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8"
 
 [[package]]
 name = "base64"
-version = "0.21.5" +version = "0.21.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" [[package]] name = "base64-url" @@ -621,7 +627,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c5b0a88aa36e9f095ee2e2b13fb8c5e4313e022783aedacc123328c0084916d" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", ] [[package]] @@ -632,9 +638,9 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic-toml" -version = "0.1.7" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f2139706359229bfa8f19142ac1155b4b80beafb7a60471ac5dd109d4a19778" +checksum = "7bfc506e7a2370ec239e1d072507b2a80c833083699d3c6fa176fbb4de8448c6" dependencies = [ "serde", ] @@ -717,9 +723,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -793,12 +799,12 @@ checksum = "771fe0050b883fcc3ea2359b1a96bcfbc090b7116eae7c3c512c7a083fdf23d3" [[package]] name = "bstr" -version = "1.8.0" +version = "1.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" +checksum = "4c2f7349907b712260e64b0afe2f84692af14a454be26187d9df565c7f69266a" dependencies = [ "memchr", - "regex-automata 0.4.3", + "regex-automata 0.3.9", "serde", ] @@ -921,7 +927,7 @@ dependencies = [ "cryptoxide 0.4.4", "digest 0.9.0", "ed25519-bip32 0.4.1", - "getrandom 0.2.11", + "getrandom 0.2.10", "hex", "itertools 0.10.5", "js-sys", @@ -974,7 +980,7 @@ dependencies = [ "axum", "chain-impl-mockchain", "chrono", - "clap 4.4.8", + "clap 4.4.6", "event-db", "jormungandr-lib", "metrics", @@ -1008,7 +1014,7 @@ dependencies = [ "chain-storage", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "color-eyre", "csv", "fraction", @@ -1220,8 +1226,8 @@ dependencies = [ "serde", "serde_json", "sparse-array", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "tempfile", "test-strategy", "thiserror", @@ -1290,7 +1296,7 @@ dependencies = [ name = "chain-vote" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", "cfg-if 1.0.0", "chain-core", "chain-crypto", @@ -1379,9 +1385,13 @@ version = "2.34.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0610544180c38b88101fecf2dd634b174a62eef6946f84dfc6a7127512b381c" dependencies = [ + "ansi_term 0.12.1", + "atty", "bitflags 1.3.2", + "strsim 0.8.0", "textwrap 0.11.0", "unicode-width", + "vec_map", ] [[package]] @@ -1396,40 +1406,40 @@ dependencies = [ "clap_lex 0.2.4", "indexmap 1.9.3", "once_cell", - "strsim", + "strsim 0.10.0", "termcolor", "textwrap 0.16.0", ] [[package]] name = "clap" -version = "4.4.8" +version = "4.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2275f18819641850fa26c89acc84d465c1bf91ce57bc2748b28c420473352f64" +checksum = "d04704f56c2cde07f43e8e2c154b43f216dc5c92fc98ada720177362f953b956" dependencies = [ "clap_builder", - "clap_derive 4.4.7", + "clap_derive 
4.4.2", ] [[package]] name = "clap_builder" -version = "4.4.8" +version = "4.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07cdf1b148b25c1e1f7a42225e30a0d99a615cd4637eae7365548dd4529b95bc" +checksum = "0e231faeaca65ebd1ea3c737966bf858971cd38c3849107aa3ea7de90a804e45" dependencies = [ "anstream", "anstyle", - "clap_lex 0.6.0", - "strsim", + "clap_lex 0.5.1", + "strsim 0.10.0", ] [[package]] name = "clap_complete" -version = "4.4.4" +version = "4.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffe91f06a11b4b9420f62103854e90867812cd5d01557f853c5ee8e791b12ae" +checksum = "e3ae8ba90b9d8b007efe66e55e48fb936272f5ca00349b5b0e89877520d35ea7" dependencies = [ - "clap 4.4.8", + "clap 4.4.6", ] [[package]] @@ -1438,7 +1448,7 @@ version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "183495371ea78d4c9ff638bfc6497d46fed2396e4f9c50aebc1278a4a9919a3d" dependencies = [ - "clap 4.4.8", + "clap 4.4.6", "clap_complete", "clap_complete_fig", "clap_complete_nushell", @@ -1446,11 +1456,11 @@ dependencies = [ [[package]] name = "clap_complete_fig" -version = "4.4.2" +version = "4.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e571d70e22ec91d34e1c5317c8308035a2280d925167646bf094fc5de1737c" +checksum = "29bdbe21a263b628f83fcbeac86a4416a1d588c7669dd41473bc4149e4e7d2f1" dependencies = [ - "clap 4.4.8", + "clap 4.4.6", "clap_complete", ] @@ -1460,7 +1470,7 @@ version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5d02bc8b1a18ee47c4d2eec3fb5ac034dc68ebea6125b1509e9ccdffcddce66e" dependencies = [ - "clap 4.4.8", + "clap 4.4.6", "clap_complete", ] @@ -1479,14 +1489,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -1500,9 +1510,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" [[package]] name = "clear_on_drop" @@ -1623,18 +1633,18 @@ dependencies = [ [[package]] name = "const_format" -version = "0.2.32" +version = "0.2.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3a214c7af3d04997541b18d432afaff4c455e79e2029079647e72fc2bd27673" +checksum = "c990efc7a285731f9a4378d81aff2f0e85a2c8781a05ef0f8baa8dac54d0ff48" dependencies = [ "const_format_proc_macros", ] [[package]] name = "const_format_proc_macros" -version = "0.2.32" +version = "0.2.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7f6ff08fd20f4f299298a28e2dfa8a8ba1036e6cd2460ac1de7b425d76f2500" +checksum = "e026b6ce194a874cb9cf32cd5772d1ef9767cc8fcb5765948d74f37a9d8b2bf6" dependencies = [ "proc-macro2", "quote", @@ -1665,9 +1675,9 @@ checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -1779,7 +1789,7 @@ version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.0", "crossterm_winapi", "libc", "mio", @@ -1833,9 +1843,9 @@ checksum = "382ce8820a5bb815055d3553a610e8cb542b2d767bbacea99038afda96cd760d" [[package]] name = "csv" -version = "1.3.0" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac574ff4d437a7b5ad237ef331c17ccca63c46479e5b5453eb8e10bb99a759fe" +checksum = "626ae34994d3d8d668f4269922248239db4ae42d538b14c398b74a52208e8086" dependencies = [ "csv-core", "itoa", @@ -1957,7 +1967,7 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", + "strsim 0.10.0", "syn 1.0.109", ] @@ -1971,8 +1981,8 @@ dependencies = [ "ident_case", "proc-macro2", "quote", - "strsim", - "syn 2.0.39", + "strsim 0.10.0", + "syn 2.0.37", ] [[package]] @@ -1994,7 +2004,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -2004,7 +2014,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if 1.0.0", - "hashbrown 0.14.2", + "hashbrown 0.14.1", "lock_api", "once_cell", "parking_lot_core 0.9.9", @@ -2037,20 +2047,13 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.9" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" dependencies = [ - "powerfmt", "serde", ] -[[package]] -name = "deunicode" -version = "1.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a1abaf4d861455be59f64fd2b55606cb151fce304ede7165f410243ce96bde6" - [[package]] name = "dialoguer" version = "0.10.4" @@ -2073,6 +2076,7 @@ dependencies = [ "byteorder", "chrono", "diesel_derives", + "libsqlite3-sys", "pq-sys", "r2d2", "serde_json", @@ -2195,7 +2199,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -2218,9 +2222,9 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dyn-clone" -version = "1.0.16" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" [[package]] name = "eccoxide" @@ -2335,6 +2339,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" +[[package]] +name = "errno" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "add4f07d43996f76ef320709726a556a9d4f965d9410d8d0271132d2f8293480" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "errno" version = "0.3.7" @@ -2379,7 +2394,7 @@ dependencies = [ 
"chain-ser", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "futures", "futures-channel", "futures-util", @@ -2425,14 +2440,15 @@ dependencies = [ [[package]] name = "fake" -version = "2.9.1" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26221445034074d46b276e13eb97a265ebdb8ed8da705c4dddd3dd20b66b45d2" +checksum = "9af7b0c58ac9d03169e27f080616ce9f64004edca3d2ef4147a811c21b23b319" dependencies = [ "chrono", "deunicode", "http", "rand 0.8.5", + "unidecode", "url-escape", ] @@ -2494,9 +2510,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", "miniz_oxide 0.7.1", @@ -2517,6 +2533,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.0" @@ -2640,7 +2671,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -2721,9 +2752,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if 1.0.0", "js-sys", @@ -2892,9 +2923,9 @@ checksum = "8995bd73dd9ff926fdfe2b146e3e571d4b488488844561c9628cf7a736d973de" [[package]] name = "h2" -version = "0.3.22" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ "bytes", "fnv", @@ -2902,10 +2933,10 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.1.0", + "indexmap 1.9.3", "slab", "tokio", - "tokio-util 0.7.10", + "tokio-util 0.7.9", "tracing", ] @@ -2939,7 +2970,16 @@ version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.3", +] + +[[package]] +name = "hashbrown" +version = "0.14.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7dfda62a12f55daeae5015f81b0baea145391cb4520f86c248fc615d72640d12" +dependencies = [ + "ahash 0.8.3", "allocator-api2", ] @@ -2949,7 +2989,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ 
- "hashbrown 0.14.2", + "hashbrown 0.14.1", ] [[package]] @@ -2976,7 +3016,7 @@ version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", "bytes", "headers-core", "http", @@ -3034,7 +3074,7 @@ dependencies = [ "chain-crypto", "chain-impl-mockchain", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "ctrlc", "custom_debug", "hex", @@ -3190,14 +3230,14 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.2" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.9", + "rustls 0.21.7", "tokio", "tokio-rustls 0.24.1", ] @@ -3214,11 +3254,24 @@ dependencies = [ "tokio-io-timeout", ] +[[package]] +name = "hyper-tls" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" +dependencies = [ + "bytes", + "hyper", + "native-tls", + "tokio", + "tokio-native-tls", +] + [[package]] name = "iana-time-zone" -version = "0.1.58" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -3252,7 +3305,7 @@ dependencies = [ "chain-time", "chain-vote", "chrono", - "clap 4.4.8", + "clap 4.4.6", "cocoon", "console", "cryptoxide 0.4.4", @@ -3387,12 +3440,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "8adf3ddd720272c6ea8bf59463c04e0f93d0bbf7c5439b691bca2987e0270897" dependencies = [ "equivalent", - "hashbrown 0.14.2", + "hashbrown 0.14.1", ] [[package]] @@ -3438,9 +3491,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.34.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5d64600be34b2fcfc267740a243fa7744441bb4947a619ac4e5bb6507f35fbfc" +checksum = "1aa511b2e298cd49b1856746f6bb73e17036bcd66b25f5e92cdcdbec9bd75686" dependencies = [ "console", "lazy_static", @@ -3465,7 +3518,7 @@ version = "0.1.0" dependencies = [ "assert_cmd", "assert_fs", - "base64 0.21.5", + "base64 0.21.4", "catalyst-toolbox", "cfg-if 1.0.0", "chain-addr", @@ -3512,7 +3565,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.5", + "socket2 0.5.4", "widestring", "windows-sys 0.48.0", "winreg", @@ -3520,9 +3573,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-terminal" @@ -3535,6 +3588,15 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "itertools" +version = "0.9.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "284f18f85651fe11e8a991b2adb42cb078325c996ed026d994719efcfca1d54b" +dependencies = [ + "either", +] + [[package]] name = "itertools" version = "0.10.5" @@ -3574,7 +3636,7 @@ dependencies = [ "chain-impl-mockchain", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "clap_complete", "ed25519-bip32 0.4.1", "gtmpl", @@ -3604,7 +3666,7 @@ checksum = "10bbdf445513bbe53f4666218b7057d265c76fa0b30475e121a6bf05dbaacaae" dependencies = [ "chrono", "cron", - "uuid 1.6.0", + "uuid 1.4.1", ] [[package]] @@ -3633,7 +3695,7 @@ dependencies = [ "chain-storage", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "criterion", "enum-as-inner", "futures", @@ -3702,7 +3764,7 @@ dependencies = [ "chain-storage", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "custom_debug", "flate2", "fs_extra", @@ -3732,8 +3794,8 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "serde_yaml 0.8.26", - "strum", + "serde_yaml", + "strum 0.24.1", "sysinfo", "tar", "tempfile", @@ -4079,15 +4141,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.150" +version = "0.2.148" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" +checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" [[package]] name = "libm" -version = "0.2.8" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "libmath" @@ -4099,14 +4161,14 @@ dependencies = [ ] [[package]] -name = "libredox" -version = "0.0.1" +name = "libsqlite3-sys" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +checksum = "0bb2c84bff2c4d43bf6866c786098f7b6a17714b0cbda3abc6323a6b7571a045" dependencies = [ - "bitflags 2.4.1", - "libc", - "redox_syscall 0.4.1", + "cc", + "pkg-config", + "vcpkg", ] [[package]] @@ -4117,9 +4179,9 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.11" +version = "0.4.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "969488b55f8ac402214f3f5fd243ebb7206cf82de60d3172994707a4bcc2b829" +checksum = "3852614a3bd9ca9804678ba6be5e3b8ce76dfc902cae004e3e0c44051b6e88db" [[package]] name = "local-ip-address" @@ -4160,7 +4222,7 @@ dependencies = [ "chain-core", "chain-crypto", "chain-impl-mockchain", - "clap 4.4.8", + "clap 4.4.6", "custom_debug", "jormungandr-automation", "jormungandr-lib", @@ -4240,7 +4302,7 @@ dependencies = [ "bech32 0.8.1", "cardano-serialization-lib", "chain-impl-mockchain", - "clap 4.4.8", + "clap 4.4.6", "color-eyre", "futures", "futures-util", @@ -4265,7 +4327,7 @@ dependencies = [ "tokio", "tracing", "tracing-subscriber", - "uuid 1.6.0", + "uuid 1.4.1", "vit-servicing-station-lib", "vit-servicing-station-tests", "voting_tools_rs", @@ -4492,9 +4554,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", @@ -4514,7 
+4576,7 @@ dependencies = [ "chain-storage", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "indicatif", "jormungandr-automation", "jormungandr-lib", @@ -4577,6 +4639,24 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +[[package]] +name = "native-tls" +version = "0.2.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" +dependencies = [ + "lazy_static", + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework", + "security-framework-sys", + "tempfile", +] + [[package]] name = "neli" version = "0.5.3" @@ -4621,7 +4701,7 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.0", "cfg-if 1.0.0", "libc", ] @@ -4800,9 +4880,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -4860,6 +4940,50 @@ version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" +[[package]] +name = "openssl" +version = "0.10.58" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9dfc0783362704e97ef3bd24261995a699468440099ef95d869b4d9732f829a" +dependencies = [ + "bitflags 2.4.0", + "cfg-if 1.0.0", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.37", +] + +[[package]] +name = "openssl-probe" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" + +[[package]] +name = "openssl-sys" +version = "0.9.94" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f55da20b29f956fb01f0add8683eb26ee13ebe3ebd935e49898717c6b4b2830" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "opentelemetry" version = "0.18.0" @@ -4961,9 +5085,9 @@ dependencies = [ [[package]] name = "os_str_bytes" -version = "6.6.1" +version = "6.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2355d85b9a3786f481747ced0e0ff2ba35213a1f9bd406ed906554d7af805a1" +checksum = "4d5d9eb14b174ee9aa2ef96dc2b94637a2d4b6e7cb873c7e171f0c20c6cf3eac" [[package]] name = "output_vt100" @@ -5125,9 +5249,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.5" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae9cee2a55a544be8b89dc6848072af97a20f2422603c10865be2a42b580fff5" +checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" dependencies = [ "memchr", "thiserror", @@ -5136,9 +5260,9 @@ 
dependencies = [ [[package]] name = "pest_meta" -version = "2.7.5" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c747191d4ad9e4a4ab9c8798f1e82a39affe7ef9648390b7e5548d18e099de6" +checksum = "1df74e9e7ec4053ceb980e7c0c8bd3594e977fde1af91daba9c928e8e8c6708d" dependencies = [ "once_cell", "pest", @@ -5147,9 +5271,9 @@ dependencies = [ [[package]] name = "pest_vm" -version = "2.7.5" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12d2b440b79b697ca2791334f1cae93409e398e04b206c92388b0ceaa0555453" +checksum = "e42d84ab5b383273ab842bdc50249b5fea1cec928bcf3338e7749113f25bab7a" dependencies = [ "pest", "pest_meta", @@ -5162,7 +5286,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.1.0", + "indexmap 2.0.2", ] [[package]] @@ -5210,7 +5334,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -5338,7 +5462,7 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", "byteorder", "bytes", "fallible-iterator", @@ -5491,8 +5615,7 @@ version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "once_cell", - "toml_edit 0.19.15", + "toml 0.5.11", ] [[package]] @@ -5560,19 +5683,19 @@ dependencies = [ [[package]] name = "proptest" -version = "1.4.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31b476131c3c86cb68032fdc5cb6d5a1045e3e42d96b69fa599fd77701e1f5bf" +checksum = "7c003ac8c77cb07bb74f5f198bce836a689bcd5a42574612bf14d17bfd08c20e" dependencies = [ "bit-set", "bit-vec", - "bitflags 2.4.1", + "bitflags 2.4.0", "lazy_static", "num-traits", "rand 0.8.5", "rand_chacha 0.3.1", "rand_xorshift", - "regex-syntax 0.8.2", + "regex-syntax 0.7.5", "rusty-fork", "tempfile", "unarray", @@ -5900,7 +6023,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", ] [[package]] @@ -5992,23 +6115,14 @@ dependencies = [ "bitflags 1.3.2", ] -[[package]] -name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - [[package]] name = "redox_users" version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.11", - "libredox", + "getrandom 0.2.10", + "redox_syscall 0.2.16", "thiserror", ] @@ -6053,19 +6167,19 @@ dependencies = [ "quote", "refinery-core", "regex", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "ebee201405406dbf528b8b672104ae6d6d63e6d118cb10e4d51abbc7b58044ff" 
dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.3.9", + "regex-syntax 0.7.5", ] [[package]] @@ -6079,13 +6193,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "59b23e92ee4318893fa3fe3e6fb365258efbfe6ac6ab30f090cdcbb7aa37efa9" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.7.5", ] [[package]] @@ -6100,12 +6214,6 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" -[[package]] -name = "regex-syntax" -version = "0.8.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" - [[package]] name = "remove_dir_all" version = "0.5.3" @@ -6126,11 +6234,11 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "78fdbab6a7e1d7b13cc8ff10197f47986b41c639300cc3c8158cac7847c9bbef" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", "bytes", "encoding_rs", "futures-core", @@ -6140,20 +6248,23 @@ dependencies = [ "http-body", "hyper", "hyper-rustls", + "hyper-tls", "ipnet", "js-sys", "log", "mime", + "native-tls", "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.9", + "rustls 0.21.7", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", "system-configuration", "tokio", + "tokio-native-tls", "tokio-rustls 0.24.1", "tower-service", "url", @@ -6217,7 +6328,7 @@ dependencies = [ "rkyv_derive", "seahash", "tinyvec", - "uuid 1.6.0", + "uuid 1.4.1", ] [[package]] @@ -6258,9 +6369,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.33.1" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" +checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" dependencies = [ "arrayvec 0.7.4", "borsh", @@ -6276,9 +6387,9 @@ dependencies = [ [[package]] name = "rust_decimal_macros" -version = "1.33.1" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e43721f4ef7060ebc2c3ede757733209564ca8207f47674181bcd425dd76945" +checksum = "86444b802de0b10ac5e563b5ddb43b541b9705de4e01a50e82194d2b183c1835" dependencies = [ "quote", "rust_decimal", @@ -6307,11 +6418,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.25" +version = "0.38.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dc99bc2d4f1fed22595588a013687477aedf3cdcfb26558c559edb67b4d9b22e" +checksum = "d2f9da0cbd88f9f09e7814e388301c8414c51c62aa6ce1e4b5c551d49d96e531" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.0", "errno", "libc", "linux-raw-sys", @@ -6332,33 +6443,33 @@ dependencies = [ [[package]] name = "rustls" -version = "0.21.9" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "629648aced5775d558af50b2b4c7b02983a04b312126d45eeead26e7caa498b9" +checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" dependencies = [ "log", - "ring 0.17.5", + 
"ring", "rustls-webpki", "sct", ] [[package]] name = "rustls-pemfile" -version = "1.0.4" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64 0.21.5", + "base64 0.21.4", ] [[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.101.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" dependencies = [ - "ring 0.17.5", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -6394,6 +6505,15 @@ dependencies = [ "winapi-util", ] +[[package]] +name = "schannel" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" +dependencies = [ + "windows-sys 0.48.0", +] + [[package]] name = "scheduled-thread-pool" version = "0.2.7" @@ -6408,7 +6528,7 @@ name = "scheduler-service-lib" version = "0.1.0" dependencies = [ "chrono", - "clap 4.4.8", + "clap 4.4.6", "futures", "jortestkit", "reqwest", @@ -6418,16 +6538,16 @@ dependencies = [ "thiserror", "tokio", "tracing", - "uuid 1.6.0", + "uuid 1.4.1", "walkdir", "warp", ] [[package]] name = "schemars" -version = "0.8.16" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" +checksum = "1f7b0ce13155372a76ee2e1c5ffba1fe61ede73fbea5630d61eee6fac4929c0c" dependencies = [ "dyn-clone", "schemars_derive", @@ -6437,9 +6557,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.16" +version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c767fd6fa65d9ccf9cf026122c1b555f2ef9a4f0cea69da4d7dbc3e258d30967" +checksum = "e85e2a16b12bdb763244c69ab79363d71db2b4b918a2def53f80b02e0574b13c" dependencies = [ "proc-macro2", "quote", @@ -6482,7 +6602,7 @@ checksum = "1db149f81d46d2deba7cd3c50772474707729550221e69588478ebf9ada425ae" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -6511,20 +6631,43 @@ dependencies = [ "zeroize", ] +[[package]] +name = "security-framework" +version = "2.9.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05b64fb303737d99b81884b2c63433e9ae28abebe5eb5045dcdd175dc2ecf4de" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework-sys" +version = "2.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e932934257d3b408ed8f30db49d85ea163bfe74961f017f405b025af298f0c7a" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "semver" -version = "1.0.20" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.192" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bca2a08484b285dcb282d0f67b26cadc0df8b19f8c12502c13d966bf9482f001" +checksum = 
"cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" dependencies = [ "serde_derive", ] @@ -6563,13 +6706,13 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.192" +version = "1.0.188" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c7207fbec9faa48073f3e3074cbe553af6ea512d7c21ba46e434e70ea9fbc1" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -6585,9 +6728,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.107" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" dependencies = [ "itoa", "ryu", @@ -6606,9 +6749,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" dependencies = [ "serde", ] @@ -6659,7 +6802,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -6745,9 +6888,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.7" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +checksum = "c1b21f559e07218024e7e9f90f96f601825397de0e25420135f7f952453fed0b" dependencies = [ "lazy_static", ] @@ -6758,6 +6901,36 @@ version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" +[[package]] +name = "sign" +version = "0.1.0" +dependencies = [ + "bech32 0.8.1", + "chain-addr", + "chain-core", + "chain-crypto", + "chain-impl-mockchain", + "chain-ser", + "chain-storage", + "chain-vote", + "clap 4.4.6", + "clap_complete_command", + "color-eyre", + "cryptoxide 0.4.4", + "csv", + "ed25519-dalek", + "hex", + "jormungandr-lib", + "rand 0.8.5", + "rand_chacha 0.3.1", + "rand_core 0.5.1", + "reqwest", + "serde", + "serde_json", + "serde_yaml", + "thiserror", +] + [[package]] name = "signal-hook" version = "0.3.17" @@ -6890,9 +7063,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.2" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" [[package]] name = "smoke" @@ -6933,7 +7106,7 @@ dependencies = [ "catalyst-toolbox", "chain-addr", "chrono", - "clap 4.4.8", + "clap 4.4.6", "futures", "hex", "jormungandr-lib", @@ -6949,7 +7122,7 @@ dependencies = [ "thiserror", "tokio", "tracing", - "uuid 1.6.0", + "uuid 1.4.1", "voting_tools_rs", "walkdir", "warp", @@ -6967,9 +7140,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" dependencies = [ "libc", "windows-sys 0.48.0", @@ -7036,6 +7209,12 @@ dependencies = [ 
"unicode-normalization", ] +[[package]] +name = "strsim" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a" + [[package]] name = "strsim" version = "0.10.0" @@ -7065,13 +7244,55 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "structopt" +version = "0.3.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c6b5c64445ba8094a6ab0c3cd2ad323e07171012d9c98b0b15651daf1787a10" +dependencies = [ + "clap 2.34.0", + "lazy_static", + "structopt-derive", +] + +[[package]] +name = "structopt-derive" +version = "0.4.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" +dependencies = [ + "heck 0.3.3", + "proc-macro-error", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "strum" +version = "0.21.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aaf86bbcfd1fa9670b7a129f64fc0c9fcbbfe4f1bc4210e9e98fe71ffc12cde2" + [[package]] name = "strum" version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", +] + +[[package]] +name = "strum_macros" +version = "0.21.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d06aaeeee809dbc59eb4556183dd927df67db1540de5be8d3ec0b6636358a5ec" +dependencies = [ + "heck 0.3.3", + "proc-macro2", + "quote", + "syn 1.0.109", ] [[package]] @@ -7123,9 +7344,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.39" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" +checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ "proc-macro2", "quote", @@ -7227,9 +7448,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if 1.0.0", "fastrand", @@ -7297,22 +7518,22 @@ checksum = "222a222a5bfe1bba4a77b45ec488a741b3cb8872e5e499451fd7d0129c9c7c3d" [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -7327,7 +7548,7 @@ dependencies = [ "chain-crypto", "chain-impl-mockchain", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "cocoon", "custom_debug", "dirs", @@ -7368,9 +7589,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" dependencies = [ "deranged", "itoa", @@ -7424,9 +7645,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.34.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c014766411e834f7af5b8f4cf46257aab4036ca95e9d2c144a10f59ad6f5b9" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ "backtrace", "bytes", @@ -7436,7 +7657,7 @@ dependencies = [ "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2 0.5.4", "tokio-macros", "windows-sys 0.48.0", ] @@ -7453,13 +7674,23 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", +] + +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", ] [[package]] @@ -7482,9 +7713,9 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.5", + "socket2 0.5.4", "tokio", - "tokio-util 0.7.10", + "tokio-util 0.7.9", "whoami", ] @@ -7505,7 +7736,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.9", + "rustls 0.21.7", "tokio", ] @@ -7518,7 +7749,7 @@ dependencies = [ "futures-core", "pin-project-lite", "tokio", - "tokio-util 0.7.10", + "tokio-util 0.7.9", ] [[package]] @@ -7550,9 +7781,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" dependencies = [ "bytes", "futures-core", @@ -7580,14 +7811,14 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.19.15", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] @@ -7598,24 +7829,13 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.2", "serde", "serde_spanned", "toml_datetime", "winnow", ] -[[package]] -name = "toml_edit" -version = "0.20.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" -dependencies = [ - "indexmap 2.1.0", - "toml_datetime", - "winnow", -] - [[package]] name = "tonic" version = "0.6.2" @@ -7671,7 +7891,7 @@ dependencies = [ 
"prost-derive 0.11.9", "tokio", "tokio-stream", - "tokio-util 0.7.10", + "tokio-util 0.7.9", "tower", "tower-layer", "tower-service", @@ -7718,7 +7938,7 @@ dependencies = [ "rand 0.8.5", "slab", "tokio", - "tokio-util 0.7.10", + "tokio-util 0.7.9", "tower-layer", "tower-service", "tracing", @@ -7730,7 +7950,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.0", "bytes", "futures-core", "futures-util", @@ -7773,27 +7993,26 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3566e8ce28cc0a3fe42519fc80e6b4c943cc4c8cef275620eb8dac2d3d4e06cf" dependencies = [ "crossbeam-channel", - "thiserror", "time", "tracing-subscriber", ] [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" dependencies = [ "once_cell", "valuable", @@ -7897,9 +8116,9 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ "matchers", "nu-ansi-term", @@ -8101,6 +8320,12 @@ version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" +[[package]] +name = "unidecode" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402bb19d8e03f1d1a7450e2bd613980869438e0666331be3e073089124aa1adc" + [[package]] name = "uniffi" version = "0.21.1" @@ -8300,17 +8525,17 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bc5cf98d8186244414c848017f0e2676b3fcb46807f6668a97dfe67359a3c4b7" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "serde", ] [[package]] name = "uuid" -version = "1.6.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c58fe91d841bc04822c9801002db4ea904b9e4b8e6bbad25127b46eff8dc516b" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "serde", ] @@ -8325,7 +8550,7 @@ dependencies = [ "chain-ser", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "hex", "hyper", "itertools 0.10.5", @@ -8365,6 +8590,12 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +[[package]] +name = "vec_map" +version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" + [[package]] name = "version_check" version = 
"0.9.4" @@ -8385,7 +8616,7 @@ name = "vit-servicing-station-cli" version = "0.3.4-dev" dependencies = [ "base64 0.13.1", - "clap 4.4.8", + "clap 4.4.6", "csv", "diesel", "diesel_migrations", @@ -8401,6 +8632,25 @@ dependencies = [ "vit-servicing-station-tests", ] +[[package]] +name = "vit-servicing-station-cli-f10" +version = "0.5.0" +dependencies = [ + "base64 0.12.3", + "csv", + "diesel", + "diesel_migrations", + "r2d2", + "rand 0.7.3", + "serde", + "serde_json", + "structopt", + "tempfile", + "thiserror", + "time", + "vit-servicing-station-lib-f10", +] + [[package]] name = "vit-servicing-station-lib" version = "0.3.4-dev" @@ -8408,7 +8658,7 @@ dependencies = [ "async-trait", "base64 0.13.1", "chain-ser", - "clap 4.4.8", + "clap 4.4.6", "diesel", "diesel_migrations", "dotenv", @@ -8426,8 +8676,8 @@ dependencies = [ "serde_json", "simplelog 0.8.0", "snapshot-lib", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "tempfile", "thiserror", "time", @@ -8440,11 +8690,44 @@ dependencies = [ "warp", ] +[[package]] +name = "vit-servicing-station-lib-f10" +version = "0.5.0" +dependencies = [ + "async-trait", + "base64 0.12.3", + "diesel", + "diesel_migrations", + "dotenv", + "eccoxide 0.2.0", + "http-zipkin", + "itertools 0.9.0", + "jormungandr-lib", + "libsqlite3-sys", + "log", + "notify", + "rand 0.8.5", + "serde", + "serde_json", + "simplelog 0.8.0", + "structopt", + "strum 0.21.0", + "strum_macros 0.21.1", + "tempfile", + "thiserror", + "time", + "tokio", + "tracing", + "tracing-futures", + "tracing-subscriber", + "warp", +] + [[package]] name = "vit-servicing-station-server" version = "0.3.4-dev" dependencies = [ - "clap 4.4.8", + "clap 4.4.6", "log", "opentelemetry", "opentelemetry-otlp", @@ -8460,6 +8743,20 @@ dependencies = [ "vit-servicing-station-lib", ] +[[package]] +name = "vit-servicing-station-server-f10" +version = "0.5.0" +dependencies = [ + "log", + "structopt", + "tokio", + "tracing", + "tracing-appender", + "tracing-futures", + "tracing-subscriber", + "vit-servicing-station-lib-f10", +] + [[package]] name = "vit-servicing-station-tests" version = "0.3.4-dev" @@ -8472,7 +8769,7 @@ dependencies = [ "chain-crypto", "chain-impl-mockchain", "chrono", - "clap 4.4.8", + "clap 4.4.6", "diesel", "dyn-clone", "fake", @@ -8502,6 +8799,44 @@ dependencies = [ "vit-servicing-station-lib", ] +[[package]] +name = "vit-servicing-station-tests-f10" +version = "0.5.0" +dependencies = [ + "assert_cmd", + "assert_fs", + "base64 0.12.3", + "cfg-if 0.1.10", + "chain-addr", + "chain-crypto", + "chain-impl-mockchain", + "diesel", + "diesel_migrations", + "dyn-clone", + "fake", + "hyper", + "itertools 0.10.5", + "jortestkit", + "lazy_static", + "libsqlite3-sys", + "predicates 2.1.5", + "pretty_assertions 0.6.1", + "quickcheck", + "quickcheck_macros", + "rand 0.7.3", + "rand_core 0.5.1", + "reqwest", + "serde", + "serde_json", + "structopt", + "tempfile", + "thiserror", + "time", + "tokio", + "url", + "vit-servicing-station-lib-f10", +] + [[package]] name = "vitup" version = "0.0.1" @@ -8516,7 +8851,7 @@ dependencies = [ "chain-impl-mockchain", "chain-time", "chain-vote", - "clap 4.4.8", + "clap 4.4.6", "console", "csv", "ctrlc", @@ -8558,8 +8893,8 @@ dependencies = [ "slave-pool", "snapshot-lib", "snapshot-trigger-service", - "strum", - "strum_macros", + "strum 0.24.1", + "strum_macros 0.24.3", "tempdir", "thiserror", "thor", @@ -8597,7 +8932,7 @@ dependencies = [ "cddl", "chrono", "ciborium", - "clap 4.4.8", + "clap 4.4.6", "color-eyre", "cryptoxide 0.4.4", "dashmap", @@ -8720,7 
+9055,7 @@ dependencies = [ "chain-vote", "clear_on_drop", "console_error_panic_hook", - "getrandom 0.2.11", + "getrandom 0.2.10", "hex", "js-sys", "rand 0.8.5", @@ -8769,7 +9104,7 @@ dependencies = [ "tokio-rustls 0.24.1", "tokio-stream", "tokio-tungstenite", - "tokio-util 0.7.10", + "tokio-util 0.7.9", "tower-service", "tracing", ] @@ -8827,7 +9162,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", "wasm-bindgen-shared", ] @@ -8861,7 +9196,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -8908,9 +9243,9 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.4" +version = "0.22.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +checksum = "07ecc0cd7cac091bf682ec5efa18b1cff79d617b84181f38b3951dbe135f607f" dependencies = [ "ring 0.17.5", "untrusted 0.9.0", @@ -9154,9 +9489,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "winnow" -version = "0.5.19" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "829846f3e3db426d4cee4510841b71a8e58aa2a76b1132579487ae430ccd9c7b" +checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" dependencies = [ "memchr", ] @@ -9241,7 +9576,7 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.39", + "syn 2.0.37", ] [[package]] @@ -9307,9 +9642,9 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.9+zstd.1.5.5" +version = "2.0.8+zstd.1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" +checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index cedc6d281d..bccc7e4375 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -54,6 +54,11 @@ members = [ "src/voting-tools-rs", "src/cat-data-service", "src/audit", + "src/vit-servicing-station-f10/vit-servicing-station-cli-f10", + "src/vit-servicing-station-f10/vit-servicing-station-lib-f10", + "src/vit-servicing-station-f10/vit-servicing-station-server-f10", + "src/vit-servicing-station-f10/vit-servicing-station-tests-f10", + "src/sign", ] [workspace.dependencies] diff --git a/Earthfile b/Earthfile index fd509a7a31..97e3ed2890 100644 --- a/Earthfile +++ b/Earthfile @@ -32,6 +32,7 @@ build-cache: libsqlite3-dev \ protobuf-compiler + RUN cargo chef cook --release SAVE ARTIFACT target SAVE ARTIFACT $CARGO_HOME cargo_home @@ -64,6 +65,7 @@ builder: libpq-dev \ libsqlite3-dev \ protobuf-compiler + RUN rustup component add rustfmt COPY --dir book src tests Cargo.lock Cargo.toml . 
COPY +build-cache/cargo_home $CARGO_HOME @@ -89,16 +91,16 @@ all: END # Build and tag all Docker images - BUILD ./containers/event-db-migrations+docker --tag=$tag --registry=$registry_final + BUILD ./containers/event-db-migrations+publish --tag=$tag --registry=$registry_final # Build crate images from the workspace BUILD ./src/jormungandr/jormungandr+docker --tag=$tag --registry=$registry_final BUILD ./src/jormungandr/jcli+docker --tag=$tag --registry=$registry_final BUILD ./src/catalyst-toolbox/catalyst-toolbox+docker --tag=$tag --registry=$registry_final BUILD ./src/voting-tools-rs+docker --tag=$tag --registry=$registry_final - BUILD ./src/cat-data-service+docker --tag=$tag --registry=$registry_final + BUILD ./src/cat-data-service+publish --tag=$tag --registry=$registry_final - BUILD ./services/voting-node+docker --tag=$tag --registry=$registry_final + BUILD ./services/voting-node+publish --tag=$tag --registry=$registry_final BUILD ./utilities/ideascale-importer+docker --tag=$tag --registry=$registry_final all-with-tags: @@ -120,10 +122,11 @@ ci: BUILD ./containers/event-db-migrations+test # Define the test stage, which runs the Rust project's tests -test: - BUILD ./src/event-db+test - BUILD ./src/cat-data-service+test - BUILD ./utilities/ideascale-importer+test +test-all: +# TODO: Enable this when CI supports passing -P dynamically +# BUILD ./src/event-db+test +# BUILD ./src/cat-data-service+test +# BUILD ./utilities/ideascale-importer+test tag-workspace: ARG SVU_VERSION=1.10.2 @@ -143,9 +146,9 @@ tag-workspace: local: LOCALLY - BUILD ./containers/event-db-migrations+docker - BUILD ./src/cat-data-service+docker - BUILD ./services/voting-node+docker + BUILD ./containers/event-db-migrations+publish + BUILD ./src/cat-data-service+publish + BUILD ./services/voting-node+publish RUN mkdir -p ./local COPY ./containers/dev-local+build/docker-compose.yml ./local/ diff --git a/book/src/10_prometheus_metrics/00.md b/book/src/10_prometheus_metrics/00.md new file mode 100644 index 0000000000..7c8b799d61 --- /dev/null +++ b/book/src/10_prometheus_metrics/00.md @@ -0,0 +1 @@ +# Prometheus Metrics diff --git a/book/src/10_prometheus_metrics/01_description.md b/book/src/10_prometheus_metrics/01_description.md new file mode 100644 index 0000000000..4b5e585cfb --- /dev/null +++ b/book/src/10_prometheus_metrics/01_description.md @@ -0,0 +1,145 @@ +# `jormungandr` Prometheus Metrics + +`jormungadr` uses Prometheus metrics to gather information about the node at runtime. + +## Fragment Mempool Process + +As the node receives fragments, they are inserted into the fragment mempool, and propagated into the peer network. + +### `txRecvCnt` + + >> tx_recv_cnt: IntCounter, + +Total number of tx inserted and propagated by the mempool at each loop in the process. + +### `txRejectedCnt` + + >> tx_rejected_cnt: IntCounter, + +Total number of tx rejected by the mempool at each loop in the process. + +### `mempoolTxCount` + + >> mempool_tx_count: UIntGauge, + +Total number of tx in the mempool for a given block + +### `mempoolUsageRatio` + + >> mempool_usage_ratio: Gauge, + +Mempool usage ratio for a given block + +## Topology Process + +As the node connects to peers, the network topology allows for gossip and p2p communication. Nodes can join or leave the network. + +### `peerConnectedCnt` + + >> peer_connected_cnt: UIntGauge, + +The total number of connected peers. + +### `peerQuarantinedCnt` + + >> peer_quarantined_cnt: UIntGauge, + +The total number of quarantined peers. 
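
Reviewer note on the metrics page being added above: the counters and gauges it documents (here and in the subsections that follow) are ordinary Prometheus primitives. As a point of reference only, here is a minimal, self-contained sketch of how metrics with these exported names could be registered, updated, and rendered using the `prometheus` crate. This is not jormungandr's actual wiring: the crate choice, the `main` driver, the sample values, and the `UIntGauge` alias (assumed here to be a u64-backed gauge) are illustrative assumptions.

```rust
// Minimal sketch, not jormungandr's real metrics backend: register and update
// counters/gauges equivalent to the ones documented on this page.
use prometheus::core::{AtomicU64, GenericGauge};
use prometheus::{Encoder, Gauge, IntCounter, Registry, TextEncoder};

// Assumption: treat the node's `UIntGauge` as a u64-backed Prometheus gauge.
type UIntGauge = GenericGauge<AtomicU64>;

fn main() -> prometheus::Result<()> {
    let registry = Registry::new();

    // Fragment mempool metrics, named after the exported names in this document.
    let tx_recv_cnt = IntCounter::new("txRecvCnt", "tx inserted and propagated per loop")?;
    let mempool_tx_count = UIntGauge::new("mempoolTxCount", "tx in the mempool for a block")?;
    let mempool_usage_ratio = Gauge::new("mempoolUsageRatio", "mempool usage ratio for a block")?;

    registry.register(Box::new(tx_recv_cnt.clone()))?;
    registry.register(Box::new(mempool_tx_count.clone()))?;
    registry.register(Box::new(mempool_usage_ratio.clone()))?;

    // Updates of the kind the fragment mempool loop would perform (sample values).
    tx_recv_cnt.inc_by(3);
    mempool_tx_count.set(42);
    mempool_usage_ratio.set(42.0 / 1024.0);

    // Render the registry in the Prometheus text exposition format,
    // as a scrape endpoint would.
    let mut buf = Vec::new();
    TextEncoder::new().encode(&registry.gather(), &mut buf)?;
    println!("{}", String::from_utf8(buf).expect("text format is UTF-8"));
    Ok(())
}
```
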
+ +### `peerAvailableCnt` + + >> peer_available_cnt: UIntGauge, + +The total number of available peers. + +### `peerTotalCnt` + + >> peer_total_cnt: UIntGauge, + +The total number of peers. + +## Blockchain Process + +Each node receives blocks streamed from the network which are processed in order to create a new block tip. + +### `blockRecvCnt` + + >> block_recv_cnt: IntCounter, + +This is the total number of blocks streamed from the network that will be processed at each loop in the process. + +## Blockchain Tip-Block Process + +As the node sets the tip-block, this happens when the node is started and during the block minting process, these metrics are updated. + +### `votesCasted` + + >> votes_casted_cnt: IntCounter, + +The total number accepted `VoteCast` fragments. Metric is incremented by the total number of valid `VoteCast` fragments +in the block tip. + +### `lastBlockTx` + + >> // Total number of tx for a given block + >> block_tx_count: IntCounter, + +The total number of valid transaction fragments in the block tip. + +### `lastBlockInputTime` <--- **misnomer** + + >> block_input_sum: UIntGauge, + +The total sum of transaction input values in the block tip. The `tx.total_input()` is added for every fragment. + +### `lastBlockSum` + + >> block_fee_sum: UIntGauge, + +The total sum of transaction output values (fees) in the block tip. The `tx.total_output()` is added for every fragment. + +### `lastBlockContentSize` + + >> block_content_size: UIntGauge, + +The total size in bytes of the sum of the transaction content in the block tip. + +### `lastBlockEpoch` + + >> block_epoch: UIntGauge, + +The epoch of the block date defined in the block tip header. + +### `lastBlockSlot` + + >> block_slot: UIntGauge, + +The slot of the block date defined in the block tip header. + +### `lastBlockHeight` + + >> block_chain_length: UIntGauge, + +Length of the blockchain. + +### `lastBlockDate` + + >> block_time: UIntGauge, + +Timestamp in seconds of the block date. + +## Unused metrics + +### `lastReceivedBlockTime` + + >> slot_start_time: UIntGauge, + +This metric is never updated. + +## Unclear metrics + +### `lastBlockHashPiece` + + >> block_hash: Vec, + +A vector of gauges that does something to with the block hash. Metric is updated when `http_response` is called. diff --git a/containers/event-db-migrations/Earthfile b/containers/event-db-migrations/Earthfile index afc8885937..09aaabe6d5 100644 --- a/containers/event-db-migrations/Earthfile +++ b/containers/event-db-migrations/Earthfile @@ -1,18 +1,20 @@ VERSION 0.7 -build: +deps: FROM ../../+rust-toolchain +build: + FROM +deps + # Build refinery RUN cargo install refinery_cli --version 0.8.7 --root . SAVE ARTIFACT ./bin/refinery refinery SAVE IMAGE --cache-hint -docker: - FROM ../../+deployment +publish: + FROM debian:stable-slim ARG tag="latest" - ARG registry ARG data="historic" WORKDIR /eventdb @@ -41,23 +43,23 @@ docker: COPY --dir ../../src/event-db+build/migrations ./migrations IF [ "$data" = "historic" ] COPY --dir ../../src/event-db+build/historic_data ./historic_data + COPY ../../src/event-db+build/stage_data ./stage_data ELSE IF [ "$data" = "test" ] COPY --dir ../../src/event-db+build/test_data ./test_data END COPY ../../src/event-db+build/refinery.toml . - COPY ../../src/event-db+build/stage_data ./stage_data VOLUME /eventdb/tmp COPY ./entry.sh . RUN chmod ugo+x ./entry.sh ENTRYPOINT ["./entry.sh"] + SAVE IMAGE migrations:$tag - # Push the container... 
- SAVE IMAGE --push ${registry}migrations:$tag - -test: - WITH DOCKER \ - --load test:latest=+docker - RUN docker run test:latest - END +# TODO: Enable this when CI supports passing -P dynamically +# test: +# FROM earthly/dind:alpine +# WITH DOCKER \ +# --load test:latest=+docker +# RUN docker run test:latest +# END diff --git a/scripts/tally/Readme.md b/scripts/tally/Readme.md new file mode 100644 index 0000000000..69a24734a6 --- /dev/null +++ b/scripts/tally/Readme.md @@ -0,0 +1,8 @@ +# How to use tally script +### Offline +To run the offline tally script we need the committe key and the `active_plans.json` (voteplans encrypted file) to be in the folder where the script is executed. The committee key is generated by the vitup tool together with the other backend artifacts, the `active_plans.json` can be retrived from the api with `curl https://{url}/api/v0/vote/active/plans > active_plans.json` after vote ended. Also we need the jcli tool to be installed. We can then run the script passing the voteplan id of the voteplan we want to decrypt `./private_offline.sh 9a278b6f788278e5cd8dfd6de8b8b8699a7f6b4847c680843de6c02d5b3169b2` + + +### Online +To run the online tally script we need the committe key to be in the folder where the script is executed. We need to set the env variable `JORMUNGANDR_RESTAPI_URL` in the script. Also we need the jcli tool to be installed. We can then run the script passing the voteplan index of the voteplan we want to decrypt and the expiry block date. The expiry block date is obtained taking the field `"lastBlockDate"` from the response of `https://{url}/api/v0/node/stats` and rounding that up. So if `"lastBlockDate": "25.779"` the script would be called like `./private.sh 0 26.0` +The online tally script needs to be runned after voting ends and before tally time ends. The script will post the decrypted result back to the node as a transaction and it will appear in the `https://{url}/api/v0/vote/active/plans` diff --git a/services/voting-node/Earthfile b/services/voting-node/Earthfile index 771f9bd80a..8b3325d208 100644 --- a/services/voting-node/Earthfile +++ b/services/voting-node/Earthfile @@ -26,7 +26,7 @@ builder: SAVE IMAGE --cache-hint # Install external python dependencies -install-deps: +deps: FROM +builder # Set the working directory @@ -51,7 +51,7 @@ install-deps: # Build for distribution build: - FROM +install-deps + FROM +deps # Build the distribution wheels and save them as artifacts RUN poetry export --without-hashes -f requirements.txt --output requirements.txt RUN poetry build --no-cache -f wheel @@ -63,7 +63,7 @@ build: # Build for development build-dev: - FROM +install-deps + FROM +deps RUN poetry install COPY tests tests COPY README.md leader0-schedule.md snapshot-data.md . @@ -90,10 +90,9 @@ pdoc: SAVE ARTIFACT /doc # Docker image built for distribution and use in production. -docker: +publish: FROM python:3.11-slim-bullseye ARG tag="latest" - ARG registry # Install voting-node system dependencies RUN apt-get update && \ @@ -133,8 +132,7 @@ docker: # Set the default command to run the main script ENTRYPOINT ["/app/entry.sh"] - - SAVE IMAGE --push ${registry}voting-node:$tag + SAVE IMAGE voting-node:$tag # Docker image built for development and testing. Do not use in production. 
docker-dev: diff --git a/services/voting-node/voting_node/importer.py b/services/voting-node/voting_node/importer.py index 224c2e6f67..c4e17b4415 100644 --- a/services/voting-node/voting_node/importer.py +++ b/services/voting-node/voting_node/importer.py @@ -162,22 +162,14 @@ def _remaining_intervals_n_seconds_to_next_snapshot(self, current_time: datetime async def _ideascale_snapshot(self, event_id: int) -> None: """Call the 'ideascale-importer ideascale import-all ' command.""" - try: - # Initialize external data importer - importer = ExternalDataImporter() - await importer.ideascale_import_all(event_id) - # raise Exception("ideascale import is DISABLED. Skipping...") - except Exception as e: - logger.error(f"snapshot: {e}") + importer = ExternalDataImporter() + await importer.ideascale_import_all(event_id) + # raise Exception("ideascale import is DISABLED. Skipping...") async def _dbsync_snapshot(self, event_id: int) -> None: """Call the 'ideascale-importer snapshot import ' command.""" - try: - # Initialize external data importer - importer = ExternalDataImporter() - await importer.snapshot_import(event_id) - except Exception as e: - logger.error(f"snapshot: {e}") + importer = ExternalDataImporter() + await importer.snapshot_import(event_id) async def take_snapshots(self, event_id: int) -> None: """Takes snapshots at regular intervals using ExternalDataImporter. diff --git a/services/voting-node/voting_node/main.py b/services/voting-node/voting_node/main.py index efe413f6e8..0361251227 100644 --- a/services/voting-node/voting_node/main.py +++ b/services/voting-node/voting_node/main.py @@ -3,6 +3,8 @@ Main entrypoint for executing the voting node service from the shell command-line. """ +import json +import logging import click import uvicorn from ideascale_importer.utils import configure_logger @@ -30,7 +32,7 @@ def voting_node_cli(): """Deploy a jormungandr node for voting events.""" -@click.command() +@click.command(context_settings={"show_default": True}) @click.option( "--reloadable", is_flag=True, @@ -50,7 +52,7 @@ def voting_node_cli(): default="0.0.0.0", help="""Host for the voting node API. - If left unset it will look for envvar `VOTING_HOST`. If no host is found, the default value is: 0.0.0.0""", + If left unset it will look for envvar `VOTING_HOST`.""", ) @click.option( "--api-port", @@ -58,7 +60,7 @@ def voting_node_cli(): default=8000, help="""Port for the voting node API. - If left unset it will look for envvar `VOTING_PORT`. If no port is found, the default value is: 8000""", + If left unset it will look for envvar `VOTING_PORT`.""", ) @click.option( "--log-level", @@ -67,16 +69,16 @@ def voting_node_cli(): type=click.Choice(["info", "debug", "warn", "error", "trace"]), help="""Set the level for logs in the voting node. - If left unset it will look for envvar `VOTING_LOG_LEVEL`. If no level is found, the default value is: info""", + If left unset it will look for envvar `VOTING_LOG_LEVEL`.""", ) @click.option( "--log-format", envvar=VOTING_LOG_FORMAT, - default="text", + default="json", type=click.Choice(["text", "json"]), help="""Set the format for logs in the voting node. - If left unset it will look for envvar `VOTING_LOG_FORMAT`. 
If no format is found, the default value is: text""", + If left unset it will look for envvar `VOTING_LOG_FORMAT`.""", ) @click.option( "--database-url", diff --git a/src/audit/Cargo.toml b/src/audit/Cargo.toml index e45f375257..ddf9c97fc7 100644 --- a/src/audit/Cargo.toml +++ b/src/audit/Cargo.toml @@ -36,7 +36,6 @@ tracing.workspace = true tracing-subscriber.workspace = true rand = "0.8.3" - [dev-dependencies] rand_chacha = "0.3" smoke = "^0.2.1" diff --git a/src/audit/src/find/README.md b/src/audit/src/find/README.md index 288e6993ec..425a6cc8ba 100644 --- a/src/audit/src/find/README.md +++ b/src/audit/src/find/README.md @@ -20,3 +20,35 @@ FRAGMENTS_STORAGE=/tmp/fund9-leader-1/persist/leader-1 ``` +### Aggregrate all voter keys and write to file +```bash + +FRAGMENTS_STORAGE=/tmp/fund9-leader-1/persist/leader-1 +./target/release/find --fragments $FRAGMENTS_STORAGE --aggregate true + +``` + +### Convert key formats +```bash + +VOTING_KEY='e5b0a5c250f78b574b8b17283bcc6c7692f72fc58090f4a0a2362497d28d1a85' + +./target/release/find --key-to-convert $VOTING_KEY + + +VOTING_KEY='ca1q0uftf4873xazhmhqrrqg4kfx7fmzfqlm5w80wake5lu3fxjfjxpk6wv3f7' + +./target/release/find --key-to-convert $VOTING_KEY + +``` + +### Check a batch of keys presented in a file format and write key metadata to a file. +```bash + +KEY_FILE='/tmp/keyfile.txt' +FRAGMENTS_STORAGE=/tmp/fund9-leader-1/persist/leader-1 + +./target/release/find --fragments $FRAGMENTS_STORAGE --key-file $KEY_FILE + +``` + diff --git a/src/audit/src/find/bin/main.rs b/src/audit/src/find/bin/main.rs index 4380af9770..6f64243d4b 100644 --- a/src/audit/src/find/bin/main.rs +++ b/src/audit/src/find/bin/main.rs @@ -3,7 +3,7 @@ //! use clap::Parser; -use lib::find::find_vote; +use lib::find::{all_voters, batch_key_check, convert_key_formats, find_vote}; use tracing::{info, Level}; use color_eyre::Result; @@ -20,10 +20,19 @@ use std::{error::Error, path::PathBuf}; pub struct Args { /// Obtain fragments by providing path to historical fund data. 
#[clap(short, long)] - pub fragments: String, + pub fragments: Option, /// voting key + #[clap(short, long, requires = "fragments")] + voting_key: Option, + /// aggregate voting keys + #[clap(short, long, requires = "fragments")] + aggregate: Option, + ///convert key formats #[clap(short, long)] - voting_key: String, + key_to_convert: Option, + /// check batch of keys and write history to file + #[clap(short, long, requires = "fragments")] + key_file: Option, } fn main() -> Result<(), Box> { @@ -49,31 +58,87 @@ fn main() -> Result<(), Box> { info!("Audit Tool."); info!("Find my vote"); - // Load and replay fund fragments from storage - let storage_path = PathBuf::from(args.fragments); - - // all fragments including tally fragments - info!("finding vote history of voter {:?}", args.voting_key); - - let matched_votes = find_vote(&storage_path, args.voting_key.clone())?; - - // record of casters votes - let matched_votes_path = PathBuf::from("/tmp/offline") - .with_extension(format!("voting_history_of_{}.json", args.voting_key)); - - let file = File::options() - .write(true) - .create(true) - .truncate(true) - .open(matched_votes_path.clone())?; - let writer = BufWriter::new(file); - - info!( - "writing voting history of voter {:?} to {:?}", - args.voting_key, matched_votes_path - ); - - serde_json::to_writer_pretty(writer, &matched_votes)?; + if let Some(voting_key) = args.voting_key { + // Load and replay fund fragments from storage + let storage_path = PathBuf::from( + args.fragments + .clone() + .expect("enforced by clap: infallible"), + ); + + // all fragments including tally fragments + info!("finding vote history of voter {:?}", voting_key); + + let matched_votes = find_vote(&storage_path, voting_key.clone())?; + + // record of casters votes + let matched_votes_path = PathBuf::from("/tmp/offline") + .with_extension(format!("voting_history_of_{}.json", voting_key)); + + let file = File::options() + .write(true) + .create(true) + .truncate(true) + .open(matched_votes_path.clone())?; + let writer = BufWriter::new(file); + + info!( + "writing voting history of voter {:?} to {:?}", + voting_key, matched_votes_path + ); + + serde_json::to_writer_pretty(writer, &matched_votes)?; + } + + if let Some(_aggregate) = args.aggregate { + // Load and replay fund fragments from storage + let storage_path = PathBuf::from( + args.fragments + .clone() + .expect("enforced by clap: infallible"), + ); + + info!("collecting all voting keys in ca and 0x format"); + + let (unique_voters_ca, unique_voters_0x) = all_voters(&storage_path)?; + + let voters_file_0x = + PathBuf::from("/tmp/inspect").with_extension("validated_voters_0x.json"); + let voters_file_ca = + PathBuf::from("/tmp/inspect").with_extension("validated_voters_ca.json"); + + let file = File::options() + .write(true) + .create(true) + .truncate(true) + .open(voters_file_ca) + .unwrap(); + let writer = BufWriter::new(file); + + serde_json::to_writer_pretty(writer, &unique_voters_ca)?; + + let file = File::options() + .write(true) + .create(true) + .truncate(true) + .open(voters_file_0x) + .unwrap(); + let writer = BufWriter::new(file); + + serde_json::to_writer_pretty(writer, &unique_voters_0x)?; + + info!("keys written to /tmp/inspect/validated_voters_*.json"); + } + + if let Some(keyfile) = args.key_file { + let storage_path = PathBuf::from(args.fragments.expect("enforced by clap: infallible")); + batch_key_check(&storage_path, keyfile)?; + } + + if let Some(voting_key) = args.key_to_convert { + let converted_key = 
convert_key_formats(voting_key)?; + info!("Converted key: {}", converted_key); + } Ok(()) } diff --git a/src/audit/src/lib/find.rs b/src/audit/src/lib/find.rs index 48fd818bcb..8abb9d9b07 100644 --- a/src/audit/src/lib/find.rs +++ b/src/audit/src/lib/find.rs @@ -1,5 +1,5 @@ use bech32::{self, FromBase32}; -use chain_addr::{Address, Kind}; +use chain_addr::{Address, AddressReadable, Kind}; use chain_crypto::{Ed25519, PublicKey}; use chain_impl_mockchain::account; @@ -10,15 +10,21 @@ use chain_impl_mockchain::{ block::Block, chaintypes::HeaderId, fragment::Fragment, transaction::InputEnum, }; -use tracing::error; +use tracing::{error, info}; use jormungandr_lib::interfaces::AccountIdentifier; const MAIN_TAG: &str = "HEAD"; -use std::path::Path; +use std::{ + collections::{HashMap, HashSet}, + error, + fs::{read_to_string, File}, + io::BufWriter, + path::{Path, PathBuf}, +}; -use crate::offline::Vote; +use crate::offline::{extract_fragments_from_storage, Vote}; #[derive(Debug, thiserror::Error)] pub enum Error { @@ -177,8 +183,123 @@ pub fn find_vote(jormungandr_database: &Path, voting_key: String) -> Result Result<(HashSet, HashSet), Box> { + let fragments = extract_fragments_from_storage(jormungandr_database)?; + + let mut unique_voters_ca = HashSet::new(); + let mut unique_voters_0x = HashSet::new(); + + for fragment in fragments { + if let Fragment::VoteCast(tx) = fragment.clone() { + let input = tx.as_slice().inputs().iter().next().unwrap().to_enum(); + let caster = if let InputEnum::AccountInput(account_id, _value) = input { + AccountIdentifier::from(account_id).into_address(Discrimination::Production, "ca") + } else { + error!("Corrupted fragment {:?}", fragment); + continue; + }; + + unique_voters_ca.insert(caster.to_string().clone()); + + let voting_key_61824_format = AddressReadable::from_string("ca", &caster.to_string()) + .expect("infallible") + .to_address(); + + let voting_key = voting_key_61824_format + .public_key() + .expect("infallible") + .to_string(); + unique_voters_0x.insert(voting_key); + } + } + + info!("unique voters ca {:?}", unique_voters_ca.len()); + info!("unique voters 0x {:?}", unique_voters_0x.len()); + + Ok((unique_voters_ca, unique_voters_0x)) +} + +/// convert keys from ca to 0x and vice versa +pub fn convert_key_formats(voting_key: String) -> Result> { + if voting_key.starts_with("ca") { + let voting_key_61824_format = AddressReadable::from_string("ca", &voting_key)?.to_address(); + + let voting_key = voting_key_61824_format + .public_key() + .expect("addr to pub key is infallible") + .to_string(); + + Ok(voting_key) + } else { + // we need to convert this to our internal key representation + let decoded_voting_key = hex::decode(voting_key)?; + let voting_key: PublicKey = PublicKey::from_binary(&decoded_voting_key)?; + let addr = Address(Discrimination::Production, Kind::Single(voting_key.clone())); + let addr_readable = AddressReadable::from_address("ca", &addr); + + Ok(addr_readable.to_string()) + } +} + +/// read voter keys from file +pub fn read_lines(filename: &str) -> Result, Box> { + let mut result = Vec::new(); + + for line in read_to_string(filename)?.lines() { + result.push(line.to_string()) + } + + Ok(result) +} + +/// check key history of multiple keys and write metadata to file +pub fn batch_key_check( + jormungandr_database: &Path, + key_file: String, +) -> Result<(), Box> { + let mut flagged_keys = HashMap::new(); + + let keys = read_lines(&key_file)?; + + for key in keys { + let voting_key_61824_format = AddressReadable::from_string("ca", 
&key) + .expect("infallible") + .to_address(); + + let voting_key = voting_key_61824_format + .public_key() + .expect("infallible") + .to_string(); + + let votes = find_vote(jormungandr_database, voting_key)?; + + flagged_keys.insert(key.clone(), votes.clone()); + + info!("Inserted: key: {} vote: {:?}", key, votes); + } + + let flagged_file = PathBuf::from("/tmp/inspect").with_extension("flag_keys.json"); + + let file = File::options() + .write(true) + .create(true) + .truncate(true) + .open(flagged_file.clone())?; + let writer = BufWriter::new(file); + + serde_json::to_writer_pretty(writer, &flagged_keys)?; + + info!("flagged keys and metadata saved here {:?}", flagged_file); + + Ok(()) +} + #[cfg(test)] mod tests { + use std::path::PathBuf; use chain_addr::{Address, AddressReadable, Discrimination, Kind}; @@ -186,6 +307,23 @@ mod tests { use crate::find::find_vote; + use super::convert_key_formats; + + #[test] + fn test_key_conversion() { + let voting_key_0x = + "f895a6a7f44dd15f7700c60456c93793b1241fdd1c77bbb6cd3fc8a4d24c8c1b".to_string(); + + let converted_key = convert_key_formats(voting_key_0x.clone()).unwrap(); + + let voting_key_ca = + "ca1q0uftf4873xazhmhqrrqg4kfx7fmzfqlm5w80wake5lu3fxjfjxpk6wv3f7".to_string(); + + assert_eq!(converted_key, voting_key_ca,); + + assert_eq!(convert_key_formats(voting_key_ca).unwrap(), voting_key_0x); + } + #[test] #[ignore] fn test_account_parser() { diff --git a/src/audit/src/lib/offline.rs b/src/audit/src/lib/offline.rs index 8a2bd7b257..4ca89025fa 100644 --- a/src/audit/src/lib/offline.rs +++ b/src/audit/src/lib/offline.rs @@ -38,7 +38,7 @@ pub enum Error { CorruptedFragments, } -#[derive(Serialize, Debug)] +#[derive(Serialize, Debug, Clone)] pub struct Vote { pub fragment_id: String, pub caster: Address, diff --git a/src/cat-data-service/Earthfile b/src/cat-data-service/Earthfile index 6847e79675..abd0906752 100644 --- a/src/cat-data-service/Earthfile +++ b/src/cat-data-service/Earthfile @@ -1,38 +1,45 @@ VERSION 0.7 -build: +deps: FROM ../../+builder + +build: + FROM +deps RUN cargo build --locked --release --bin cat-data-service --features jorm-mock # Store the artifact SAVE ARTIFACT target/release/cat-data-service cat-data-service SAVE IMAGE --cache-hint -docker: - FROM ../../+deployment +# TODO: Enable this when CI supports passing -P dynamically +# test: +# FROM earthly/dind:alpine + +# COPY ../../src/event-db+docker-compose/docker-compose.yml docker-compose.yml +# WITH DOCKER \ +# --compose docker-compose.yml \ +# --pull postgres:14 \ +# --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ +# --load test:latest=(../../+builder) \ +# --service migrations \ +# --allow-privileged +# RUN docker run \ +# --network default_default \ +# -e EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev" \ +# test:latest \ +# cargo test -p cat-data-service --all-features +# END + +publish: + FROM debian:stable-slim WORKDIR /app ARG tag="latest" - ARG registry COPY +build/cat-data-service . COPY entry.sh . 
RUN chmod +x entry.sh ENTRYPOINT ["/app/entry.sh"] - SAVE IMAGE --push ${registry}cat-data-service:$tag - -# Need to be run with the -P flag -test: - FROM ../../+builder - - COPY ../../src/event-db+docker-compose/docker-compose.yml docker-compose.yml - WITH DOCKER \ - --compose docker-compose.yml \ - --pull postgres:14 \ - --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ - --service migrations \ - --allow-privileged - RUN EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" cargo test -p cat-data-service --all-features - END + SAVE IMAGE cat-data-service:$tag diff --git a/src/catalyst-toolbox/catalyst-toolbox/src/proposal_score/mod.rs b/src/catalyst-toolbox/catalyst-toolbox/src/proposal_score/mod.rs index 6aff32305f..787a1aed2a 100644 --- a/src/catalyst-toolbox/catalyst-toolbox/src/proposal_score/mod.rs +++ b/src/catalyst-toolbox/catalyst-toolbox/src/proposal_score/mod.rs @@ -93,11 +93,13 @@ fn weighted_avarage_score( let allocated_weight = review_weight(allocated_weight, allocated_count); let not_allocated_weight = review_weight(not_allocated_weight, not_allocated_count); - let res = (total_allocated_rating as f64 * allocated_weight + let mut res = (total_allocated_rating as f64 * allocated_weight + total_not_allocated_rating as f64 * not_allocated_weight) / (allocated_weight * allocated_count as f64 + not_allocated_weight * not_allocated_count as f64); + // round to 1 decimal place + res = (10.0 * res).round() / 10.0; Ok(res) } @@ -119,7 +121,7 @@ mod tests { } #[test] - fn weighted_score_test() { + fn weighted_score_test_1() { let allocated_weight = 0.8; let not_allocated_weight = 0.2; @@ -161,6 +163,90 @@ mod tests { assert!(weighted_avarage_score(0.5, 0.6, &[]).is_err()); } + #[test] + fn weighted_score_test_2() { + let allocated_weight = 0.7; + let not_allocated_weight = 0.3; + + let reviews = vec![ + Review { + rating: 1, + allocated: false, + }, + Review { + rating: 2, + allocated: false, + }, + Review { + rating: 3, + allocated: false, + }, + Review { + rating: 4, + allocated: false, + }, + Review { + rating: 5, + allocated: false, + }, + Review { + rating: 6, + allocated: true, + }, + Review { + rating: 8, + allocated: true, + }, + ]; + + let result = + weighted_avarage_score(allocated_weight, not_allocated_weight, &reviews).unwrap(); + // To be precise the result should be `5.799999999999999`, but we are rounding to 1 decimal place + assert_eq!(result, 5.8); + } + + #[test] + fn weighted_score_test_3() { + let allocated_weight = 0.7; + let not_allocated_weight = 0.3; + + let reviews = vec![ + Review { + rating: 1, + allocated: false, + }, + Review { + rating: 2, + allocated: false, + }, + Review { + rating: 3, + allocated: false, + }, + Review { + rating: 4, + allocated: false, + }, + Review { + rating: 5, + allocated: false, + }, + Review { + rating: 6, + allocated: true, + }, + Review { + rating: 7, + allocated: true, + }, + ]; + + let result = + weighted_avarage_score(allocated_weight, not_allocated_weight, &reviews).unwrap(); + // To be precise the result should be `5.449999999999999`, but we are rounding to 1 decimal place + assert_eq!(result, 5.4); + } + #[test] fn full_test() { let allocated_weight = 0.8; diff --git a/src/chain-libs/chain-impl-mockchain/src/transaction/transaction.rs b/src/chain-libs/chain-impl-mockchain/src/transaction/transaction.rs index 6b147493f8..f45e7e7271 100644 --- a/src/chain-libs/chain-impl-mockchain/src/transaction/transaction.rs +++ 
b/src/chain-libs/chain-impl-mockchain/src/transaction/transaction.rs @@ -265,6 +265,7 @@ pub(super) struct TransactionStruct { /// Verify the structure of the transaction and return all the offsets fn get_spine(slice: &[u8]) -> Result { let sz = slice.len(); + let mut codec = Codec::new(slice); // read payload diff --git a/src/chain-libs/chain-vote/src/cryptography/zkps/unit_vector/zkp.rs b/src/chain-libs/chain-vote/src/cryptography/zkps/unit_vector/zkp.rs index 914114bb3f..d5d2369c73 100644 --- a/src/chain-libs/chain-vote/src/cryptography/zkps/unit_vector/zkp.rs +++ b/src/chain-libs/chain-vote/src/cryptography/zkps/unit_vector/zkp.rs @@ -293,6 +293,17 @@ impl Zkp { self.ibas.iter() } + /// Return announcement commitments group elements + pub fn announcments_group_elements(&self) -> Vec { + let mut announcements = Vec::new(); + for g in self.ibas.clone() { + announcements.push(g.i); + announcements.push(g.b); + announcements.push(g.a) + } + announcements + } + /// Return an iterator of the encryptions of the polynomial coefficients pub fn ds(&self) -> impl Iterator { self.ds.iter() @@ -303,6 +314,18 @@ impl Zkp { self.zwvs.iter() } + /// Return an iterator of the response related to the randomness + pub fn response_randomness_group_elements(&self) -> Vec { + let mut response = Vec::new(); + for z in self.zwvs.iter().clone() { + response.push(z.z.clone()); + response.push(z.w.clone()); + response.push(z.v.clone()); + } + + response + } + /// Return R pub fn r(&self) -> &Scalar { &self.r diff --git a/src/chain-libs/chain-vote/src/encrypted_vote.rs b/src/chain-libs/chain-vote/src/encrypted_vote.rs index f8a13fda76..79bda44fc6 100644 --- a/src/chain-libs/chain-vote/src/encrypted_vote.rs +++ b/src/chain-libs/chain-vote/src/encrypted_vote.rs @@ -219,6 +219,7 @@ mod tests { #[test] fn unit_vector() { let uv = UnitVector::new(5, 0).unwrap(); + assert_eq!( &uv.iter().collect::>()[..], [true, false, false, false, false] diff --git a/src/event-db/Earthfile b/src/event-db/Earthfile index 40b54f043c..83c2647d90 100644 --- a/src/event-db/Earthfile +++ b/src/event-db/Earthfile @@ -37,15 +37,21 @@ docker-compose: SAVE ARTIFACT docker-compose.yml # Need to be run with the -P flag -test: - FROM ../../+builder +# TODO: Enable this when CI supports passing -P dynamically +# test: +# FROM earthly/dind:alpine - COPY +docker-compose/docker-compose.yml . - WITH DOCKER \ - --compose docker-compose.yml \ - --pull postgres:14 \ - --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ - --service migrations \ - --allow-privileged - RUN EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@localhost/CatalystEventDev" cargo test -p event-db - END +# COPY +docker-compose/docker-compose.yml . 
+# WITH DOCKER \ +# --compose docker-compose.yml \ +# --pull postgres:14 \ +# --load migrations:latest=(../../containers/event-db-migrations+docker --data=test) \ +# --load test:latest=(../../+builder) \ +# --service migrations \ +# --allow-privileged +# RUN docker run \ +# --network default_default \ +# -e EVENT_DB_URL="postgres://catalyst-event-dev:CHANGE_ME@postgres/CatalystEventDev" \ +# test:latest \ +# cargo test -p event-db +# END diff --git a/src/event-db/src/queries/event/mod.rs b/src/event-db/src/queries/event/mod.rs index 6759cfcae9..1878b7666a 100644 --- a/src/event-db/src/queries/event/mod.rs +++ b/src/event-db/src/queries/event/mod.rs @@ -42,8 +42,8 @@ impl EventDB { LEFT JOIN snapshot ON event.row_id = snapshot.event WHERE event.row_id = $1;"; - const EVENT_GOALS_QUERY: &'static str = "SELECT goal.idx, goal.name - FROM goal + const EVENT_GOALS_QUERY: &'static str = "SELECT goal.idx, goal.name + FROM goal WHERE goal.event_id = $1;"; } @@ -204,14 +204,6 @@ mod tests { assert_eq!( events, vec![ - EventSummary { - id: EventId(0), - name: "Test Fund".to_string(), - starts: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - ends: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - reg_checked: None, - is_final: true, - }, EventSummary { id: EventId(1), name: "Test Fund 1".to_string(), @@ -325,14 +317,6 @@ mod tests { assert_eq!( events, vec![ - EventSummary { - id: EventId(0), - name: "Test Fund".to_string(), - starts: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - ends: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - reg_checked: None, - is_final: true, - }, EventSummary { id: EventId(1), name: "Test Fund 1".to_string(), @@ -359,10 +343,36 @@ mod tests { )), is_final: true, }, + EventSummary { + id: EventId(2), + name: "Test Fund 2".to_string(), + starts: Some(DateTime::::from_utc( + NaiveDateTime::new( + NaiveDate::from_ymd_opt(2021, 5, 1).unwrap(), + NaiveTime::from_hms_opt(12, 0, 0).unwrap() + ), + Utc + )), + ends: Some(DateTime::::from_utc( + NaiveDateTime::new( + NaiveDate::from_ymd_opt(2021, 6, 1).unwrap(), + NaiveTime::from_hms_opt(12, 0, 0).unwrap() + ), + Utc + )), + reg_checked: Some(DateTime::::from_utc( + NaiveDateTime::new( + NaiveDate::from_ymd_opt(2021, 3, 31).unwrap(), + NaiveTime::from_hms_opt(12, 0, 0).unwrap() + ), + Utc + )), + is_final: true, + }, ] ); - let events = event_db.get_events(Some(1), Some(1)).await.unwrap(); + let events = event_db.get_events(Some(1), Some(0)).await.unwrap(); assert_eq!( events, vec![EventSummary { diff --git a/src/event-db/src/queries/search.rs b/src/event-db/src/queries/search.rs index 616602e9d9..7a12fde2d4 100644 --- a/src/event-db/src/queries/search.rs +++ b/src/event-db/src/queries/search.rs @@ -291,18 +291,10 @@ mod tests { .search(search_query.clone(), false, None, None) .await .unwrap(); - assert_eq!(query_result.total, 6); + assert_eq!(query_result.total, 5); assert_eq!( query_result.results, Some(ValueResults::Events(vec![ - EventSummary { - id: EventId(0), - name: "Test Fund".to_string(), - starts: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - ends: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - reg_checked: None, - is_final: true, - }, EventSummary { id: EventId(1), name: "Test Fund 1".to_string(), @@ -416,7 +408,7 @@ mod tests { .search(search_query, true, None, None) .await .unwrap(); - assert_eq!(query_result.total, 6); + assert_eq!(query_result.total, 5); assert_eq!(query_result.results, None); let search_query = SearchQuery { @@ -434,7 
+426,7 @@ mod tests { .search(search_query.clone(), false, None, None) .await .unwrap(); - assert_eq!(query_result.total, 6); + assert_eq!(query_result.total, 5); assert_eq!( query_result.results, Some(ValueResults::Events(vec![ @@ -544,14 +536,6 @@ mod tests { )), is_final: true, }, - EventSummary { - id: EventId(0), - name: "Test Fund".to_string(), - starts: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - ends: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - reg_checked: None, - is_final: true, - } ])) ); @@ -598,7 +582,7 @@ mod tests { .search(search_query.clone(), false, None, Some(2)) .await .unwrap(); - assert_eq!(query_result.total, 4); + assert_eq!(query_result.total, 3); assert_eq!( query_result.results, Some(ValueResults::Events(vec![ @@ -680,14 +664,6 @@ mod tests { )), is_final: true, }, - EventSummary { - id: EventId(0), - name: "Test Fund".to_string(), - starts: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - ends: Some(DateTime::::from_utc(NaiveDateTime::default(), Utc)), - reg_checked: None, - is_final: true, - } ])) ); diff --git a/src/event-db/stage_data/dev/00001_fund11_event.sql b/src/event-db/stage_data/dev/00001_fund11_event.sql new file mode 100644 index 0000000000..4749709074 --- /dev/null +++ b/src/event-db/stage_data/dev/00001_fund11_event.sql @@ -0,0 +1,78 @@ +-- F11 +INSERT INTO event ( + row_id, + name, + description, + registration_snapshot_time, + snapshot_start, + voting_power_threshold, + max_voting_power_pct, + review_rewards, + start_time, + end_time, + insight_sharing_start, + proposal_submission_start, + refine_proposals_start, + finalize_proposals_start, + proposal_assessment_start, + assessment_qa_start, + voting_start, + voting_end, + tallying_end, + block0, + block0_hash, + committee_size, + committee_threshold, + extra, + cast_to +) VALUES ( + 11, + 'Fund 11', + 'Catalyst Testnet - Fund 11', + '2023-12-30 21:00:00', -- Registration Snapshot Time + '2023-12-31 00:00:00', -- Snapshot Start. 
+ 450000000, -- Voting Power Threshold + 1, -- Max Voting Power PCT + NULL, -- Review Rewards + '2023-11-03 00:00:00', -- Start Time + '2024-02-01 00:00:00', -- End Time + '2023-11-04 00:00:00', -- Insight Sharing Start + '2023-11-04 00:00:00', -- Proposal Submission Start + '2023-11-04 00:00:00', -- Refine Proposals Start + '2023-11-04 00:00:00', -- Finalize Proposals Start + '2023-11-04 00:00:00', -- Proposal Assessment Start + '2023-11-04 00:00:00', -- Assessment QA Start + '2024-02-02 11:00:00', -- Voting Starts + '2024-02-04 11:00:00', -- Voting Ends + '2024-02-06 11:00:00', -- Tallying Ends + NULL, -- Block 0 Data + NULL, -- Block 0 Hash + 1, -- Committee Size + 1, -- Committee Threshold + NULL, -- Extra + NULL -- Cast to +) ON CONFLICT (row_id) DO UPDATE +SET name = EXCLUDED.name, + description = EXCLUDED.description, + registration_snapshot_time = EXCLUDED.registration_snapshot_time, + snapshot_start = EXCLUDED.snapshot_start, + voting_power_threshold = EXCLUDED.voting_power_threshold, + max_voting_power_pct = EXCLUDED.max_voting_power_pct, + review_rewards = EXCLUDED.review_rewards, + start_time = EXCLUDED.start_time, + end_time = EXCLUDED.end_time, + insight_sharing_start = EXCLUDED.insight_sharing_start, + proposal_submission_start = EXCLUDED.proposal_submission_start, + refine_proposals_start = EXCLUDED.refine_proposals_start, + finalize_proposals_start = EXCLUDED.finalize_proposals_start, + proposal_assessment_start = EXCLUDED.proposal_assessment_start, + assessment_qa_start = EXCLUDED.assessment_qa_start, + voting_start = EXCLUDED.voting_start, + voting_end = EXCLUDED.voting_end, + tallying_end = EXCLUDED.tallying_end, + block0 = EXCLUDED.block0, + block0_hash = EXCLUDED.block0_hash, + committee_size = EXCLUDED.committee_size, + committee_threshold = EXCLUDED.committee_threshold, + extra = EXCLUDED.extra, + cast_to = EXCLUDED.cast_to; diff --git a/src/event-db/stage_data/dev/00001_testfund_event.sql b/src/event-db/stage_data/dev/00001_testfund_event.sql deleted file mode 100644 index 17e365b4f4..0000000000 --- a/src/event-db/stage_data/dev/00001_testfund_event.sql +++ /dev/null @@ -1,54 +0,0 @@ --- F10 -INSERT INTO event ( - row_id, - name, - description, - registration_snapshot_time, - snapshot_start, - voting_power_threshold, - max_voting_power_pct, - review_rewards, - start_time, - end_time, - insight_sharing_start, - proposal_submission_start, - refine_proposals_start, - finalize_proposals_start, - proposal_assessment_start, - assessment_qa_start, - voting_start, - voting_end, - tallying_end, - block0, - block0_hash, - committee_size, - committee_threshold, - extra, - cast_to -) VALUES ( - 0, - 'Test Fund', - 'Catalyst Dev Environment - Test Fund', - '1970-01-01 00:00:00', -- Registration Snapshot Time - '1970-01-01 00:00:00', -- Snapshot Start. 
- 450000000, -- Voting Power Threshold - 1, -- Max Voting Power PCT - NULL, -- Review Rewards - '1970-01-01 00:00:00', -- Start Time - '1970-01-01 00:00:00', -- End Time - '1970-01-01 00:00:00', -- Insight Sharing Start - '1970-01-01 00:00:00', -- Proposal Submission Start - '1970-01-01 00:00:00', -- Refine Proposals Start - '1970-01-01 00:00:00', -- Finalize Proposals Start - '1970-01-01 00:00:00', -- Proposal Assessment Start - '1970-01-01 00:00:00', -- Assessment QA Start - '1970-01-01 00:00:00', -- Voting Starts - '1970-01-01 00:00:00', -- Voting Ends - '1970-01-01 00:00:00', -- Tallying Ends - NULL, -- Block 0 Data - NULL, -- Block 0 Hash - 1, -- Committee Size - 1, -- Committee Threshold - NULL, -- Extra - NULL -- Cast to -); \ No newline at end of file diff --git a/src/event-db/stage_data/dev/00002_testfund_ideascale_params.sql b/src/event-db/stage_data/dev/00002_fund11_params.sql similarity index 62% rename from src/event-db/stage_data/dev/00002_testfund_ideascale_params.sql rename to src/event-db/stage_data/dev/00002_fund11_params.sql index 01e91d6a72..aa167e55bd 100644 --- a/src/event-db/stage_data/dev/00002_testfund_ideascale_params.sql +++ b/src/event-db/stage_data/dev/00002_fund11_params.sql @@ -1,20 +1,19 @@ --- Define F10 IdeaScale parameters. +-- Define F11 IdeaScale parameters. INSERT INTO config (id, id2, id3, value) VALUES ( 'ideascale', - '0', + '11', '', - '{ - "group_id": 37429, - "review_stage_ids": [171], - "nr_allocations": [1, 1], - "campaign_group_id": 88, + '{ + "group_id": 31051, + "review_stage_ids": [143, 145], + "nr_allocations": [30, 80], + "campaign_group_id": 63, "questions": { - "Question 1": "Impact / Alignment", - "Question 2": "Feasibility", - "Question 3": "Auditability" + "You are reviewing the positive IMPACT this project will have on the Cardano Ecosystem.\nHas this project clearly demonstrated in all aspects of the proposal that it will have a positive impact on the Cardano Ecosystem?": "Impact / Alignment", + "You are reviewing the FEASIBILITY of this project.\nIs this project feasible based on the proposal submitted? Does the plan and associated budget and milestones look achievable? Does the team have the skills, experience, capability and capacity to complete the project successfully?": "Feasibility", + "You are reviewing the VALUE FOR MONEY this represents for the Treasury and the Community\nIs the funding amount requested for this project reasonable and does it provide good Value for Money to the Treasury?": "Auditability" }, - "stage_ids": [4684, 4685, 4686], - "anonymize_start_id": 5000, + "stage_ids": [4590, 4596, 4602, 4608, 4614, 4620, 4626, 4632, 4638, 4644, 4650, 4656, 4662, 4591, 4597, 4603, 4609, 4615, 4621, 4627, 4633, 4639, 4645, 4651, 4657, 4663, 4592, 4598, 4604, 4610, 4616, 4622, 4628, 4634, 4640, 4646, 4652, 4658, 4664], "proposals": { "field_mappings": { "proposer_url": ["relevant_link_1", "website__github_repository__or_any_other_relevant_link__", "relevant_link_3"], @@ -49,12 +48,14 @@ INSERT INTO config (id, id2, id3, value) VALUES ( "score_field": "Rating" } }' -); +) ON CONFLICT (id, id2, id3) DO UPDATE +SET value = EXCLUDED.value; --- Use F10 params for event with row_id = 10. +-- Use F11 params for event with row_id = 11. 
INSERT INTO config (id, id2, id3, value) VALUES ( 'event', 'ideascale_params', - '0', - '{"params_id": "TestFund"}' -); + '11', + '{"params_id": "F11"}' +) ON CONFLICT (id, id2, id3) DO UPDATE +SET value = EXCLUDED.value; diff --git a/src/event-db/stage_data/testnet/00001_fund10_event.sql b/src/event-db/stage_data/testnet/00001_fund10_event.sql index 8b6f0cb6c3..200dd1e99a 100644 --- a/src/event-db/stage_data/testnet/00001_fund10_event.sql +++ b/src/event-db/stage_data/testnet/00001_fund10_event.sql @@ -51,4 +51,28 @@ INSERT INTO event ( 1, -- Committee Threshold NULL, -- Extra NULL -- Cast to -); \ No newline at end of file +) ON CONFLICT (row_id) DO UPDATE +SET name = EXCLUDED.name, + description = EXCLUDED.description, + registration_snapshot_time = EXCLUDED.registration_snapshot_time, + snapshot_start = EXCLUDED.snapshot_start, + voting_power_threshold = EXCLUDED.voting_power_threshold, + max_voting_power_pct = EXCLUDED.max_voting_power_pct, + review_rewards = EXCLUDED.review_rewards, + start_time = EXCLUDED.start_time, + end_time = EXCLUDED.end_time, + insight_sharing_start = EXCLUDED.insight_sharing_start, + proposal_submission_start = EXCLUDED.proposal_submission_start, + refine_proposals_start = EXCLUDED.refine_proposals_start, + finalize_proposals_start = EXCLUDED.finalize_proposals_start, + proposal_assessment_start = EXCLUDED.proposal_assessment_start, + assessment_qa_start = EXCLUDED.assessment_qa_start, + voting_start = EXCLUDED.voting_start, + voting_end = EXCLUDED.voting_end, + tallying_end = EXCLUDED.tallying_end, + block0 = EXCLUDED.block0, + block0_hash = EXCLUDED.block0_hash, + committee_size = EXCLUDED.committee_size, + committee_threshold = EXCLUDED.committee_threshold, + extra = EXCLUDED.extra, + cast_to = EXCLUDED.cast_to; diff --git a/src/event-db/stage_data/testnet/00002_fund10_ideascale_params.sql b/src/event-db/stage_data/testnet/00002_fund10_ideascale_params.sql index 81156fe5b9..48678eae7d 100644 --- a/src/event-db/stage_data/testnet/00002_fund10_ideascale_params.sql +++ b/src/event-db/stage_data/testnet/00002_fund10_ideascale_params.sql @@ -48,7 +48,8 @@ INSERT INTO config (id, id2, id3, value) VALUES ( "score_field": "Rating" } }' -); +) ON CONFLICT (id, id2, id3) DO UPDATE +SET value = EXCLUDED.value; -- Use F10 params for event with row_id = 10. 
INSERT INTO config (id, id2, id3, value) VALUES ( @@ -56,4 +57,6 @@ INSERT INTO config (id, id2, id3, value) VALUES ( 'ideascale_params', '10', '{"params_id": "F10"}' -); +) ON CONFLICT (id, id2, id3) DO UPDATE +SET value = EXCLUDED.value; + diff --git a/src/jormungandr/testing/jormungandr-automation/src/jormungandr/rest/raw.rs b/src/jormungandr/testing/jormungandr-automation/src/jormungandr/rest/raw.rs index 80f08e3c3f..212f777d6a 100644 --- a/src/jormungandr/testing/jormungandr-automation/src/jormungandr/rest/raw.rs +++ b/src/jormungandr/testing/jormungandr-automation/src/jormungandr/rest/raw.rs @@ -249,6 +249,12 @@ impl RawRest { headers } + fn construct_headers_json(&self) -> HeaderMap { + let mut headers = HeaderMap::new(); + headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + headers + } + fn post( &self, path: &str, @@ -310,7 +316,7 @@ impl RawRest { ) -> Result { self.client .post(self.path(ApiVersion::V1, "fragments")) - .headers(self.construct_headers()) + .headers(self.construct_headers_json()) .json(&FragmentsBatch { fail_fast, fragments, diff --git a/src/sign/Cargo.toml b/src/sign/Cargo.toml new file mode 100644 index 0000000000..ebdc0cf3af --- /dev/null +++ b/src/sign/Cargo.toml @@ -0,0 +1,37 @@ +[package] +name = "sign" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +chain-crypto = { path = "../chain-libs/chain-crypto" } +chain-vote = { path = "../chain-libs/chain-vote" } +jormungandr-lib = { path = "../jormungandr/jormungandr-lib" } +chain-addr = { path = "../chain-libs/chain-addr" } +chain-core = { path = "../chain-libs/chain-core" } +chain-impl-mockchain = { path = "../chain-libs/chain-impl-mockchain" ,features= ["audit"]} +chain-ser = { path = "../chain-libs/chain-ser" } +chain-storage = { path = "../chain-libs/chain-storage" } + + +hex = "0.4" +cryptoxide = "0.4.2" +rand_chacha = "0.3" + +clap = { version = "4", features = ["derive", "cargo"] } +clap_complete_command = { version = "0.5" } + +color-eyre = "0.6" +thiserror = "1.0.40" +csv = "1.1" + +serde = "1.0" +serde_json = "1.0" +serde_yaml = "0.8.17" +rand = "0.8.3" +bech32 = "0.8" +rand_core = { version = "0.5.1", default-features = false } +ed25519-dalek = "1.0.1" +reqwest = { version = "*", features = ["blocking","json"] } \ No newline at end of file diff --git a/src/sign/README.md b/src/sign/README.md new file mode 100644 index 0000000000..9e0d1a8762 --- /dev/null +++ b/src/sign/README.md @@ -0,0 +1,39 @@ +# **Vote** Fragment generator and signer: +Generates vote fragments and signs them accordingly + +## Specifications + [*see here for format.abnf*](../chain-libs/chain-impl-mockchain/doc/format.abnf) + + [*see here for format.md*](../chain-libs/chain-impl-mockchain/doc/format.md) + +## Ingredients for generating a **vote** fragment + +- Election public key +- Alice public key +- Alice private key +- proposal to vote on +- vote plan id (hash of voteplan) +- epoch +- slot + +*Example usage:* + +``` +cargo build --release -p sign +``` + +*Generate raw vote fragment in byte representation* + +```bash + +ELECTION_PUB_KEY=ristretto255_votepk1ppxnuxrqa4728evnp2ues000uvwvwtxmtf77ejc29lknjuqqu44s4cfmja +ALICE_SK=56e367979579e2ce27fbd305892b0706b7dede999a534a864a7430a5c6aefd3c +ALICE_PK=ea084d2d80ed0ab681333d934efc56df3868d13d46a2de3b7f27f40b62e5344d +PROPOSAL=5 +VOTE_PLAN_ID=36ad42885189a0ac3438cdb57bc8ac7f6542e05a59d1f2e4d1d38194c9d4ac7b +EPOCH=0 +SLOT=0 + +./target/release/sign 
--election-pub-key $ELECTION_PUB_KEY --private-key $ALICE_SK --public-key $ALICE_PK --proposal $PROPOSAL --vote-plan-id $VOTE_PLAN_ID --epoch $EPOCH --slot $SLOT + +``` \ No newline at end of file diff --git a/src/sign/src/fragment.rs b/src/sign/src/fragment.rs new file mode 100644 index 0000000000..e3bb280063 --- /dev/null +++ b/src/sign/src/fragment.rs @@ -0,0 +1,325 @@ +//! Generate Fragments based upon specification +//! Reference specfication for more context in relation to constants outlined in this file. + +use chain_ser::packer::Codec; + +use chain_vote::{Ciphertext, ProofOfCorrectVote}; +use ed25519_dalek::{ed25519::signature::Signature, *}; +use std::error; + +/// Payload type = 2 +/// %x02 ENCRYPTED-VOTE PROOF-VOTE ; Private payload +const ENCRYPTED_PAYLOAD: u8 = 2; + +/// VoteCast tag +const VOTE_CAST_TAG: u8 = 11; + +/// INPUT-ACCOUNT = %xff VALUE UNTAG-ACCOUNT-ID +const INPUT_ACCOUNT: u8 = 255; + +/// Only 1 input (subsequently 1 witness), no output +/// VoteCast TX should have only 1 input, 0 output and 1 witness (signature). +const INPUT: u8 = 1; +const OUTPUT: u8 = 0; + +/// Nonce +const NONCE: u32 = 0; + +/// Type = 2 +/// utxo witness scheme +/// ED25519 Signature (64 bytes) +const WITNESS_SCHEME: u8 = 2; + +/// Padding +const PADDING: u8 = 0; + +/// Values in inputs: redundant for voting +const VALUE: u64 = 0; + +/// Padding and Tag are 1 byte each; size must be added to the fragment size +const PADDING_AND_TAG_SIZE: u32 = 2; + +/// Generate vote fragment in bytes +pub fn generate_vote_fragment( + keypair: Keypair, + encrypted_vote: Vec, + proof: Vec, + proposal: u8, + vote_plan_id: &[u8], + epoch: u32, + slot: u32, +) -> Result, Box> { + let mut vote_cast = Codec::new(Vec::new()); + + vote_cast.put_bytes(vote_plan_id)?; + vote_cast.put_u8(proposal)?; + vote_cast.put_u8(ENCRYPTED_PAYLOAD)?; + vote_cast.put_bytes(&encrypted_vote)?; + vote_cast.put_bytes(&proof)?; + + let data_to_sign = vote_cast.into_inner().clone(); + + let (inputs, witness) = + compose_inputs_and_witnesses(keypair, data_to_sign.clone(), epoch, slot)?; + + let mut vote_cast = Codec::new(Vec::new()); + vote_cast.put_bytes(&data_to_sign)?; + vote_cast.put_bytes(&inputs)?; + vote_cast.put_bytes(&witness)?; + + let data = vote_cast.into_inner(); + + // prepend msg with size of fragment msg + let mut vote_cast = Codec::new(Vec::new()); + vote_cast.put_be_u32(data.len() as u32 + PADDING_AND_TAG_SIZE)?; + vote_cast.put_u8(PADDING)?; + vote_cast.put_u8(VOTE_CAST_TAG)?; + vote_cast.put_bytes(data.as_slice())?; + + Ok(vote_cast.into_inner()) +} + +/// Generate Inputs-Outputs-Witnesses in bytes +fn compose_inputs_and_witnesses( + keypair: Keypair, + data_to_sign: Vec, + epoch: u32, + slot: u32, +) -> Result<(Vec, Vec), Box> { + let mut inputs = Codec::new(Vec::new()); + + inputs.put_be_u32(epoch)?; + inputs.put_be_u32(slot)?; + inputs.put_u8(INPUT)?; + inputs.put_u8(OUTPUT)?; + + inputs.put_u8(INPUT_ACCOUNT)?; + inputs.put_be_u64(VALUE)?; + inputs.put_bytes(keypair.public.as_bytes())?; + let inputs = inputs.into_inner().clone(); + + let mut tx_data_to_sign = Codec::new(Vec::new()); + tx_data_to_sign.put_bytes(&data_to_sign.clone())?; + tx_data_to_sign.put_bytes(&inputs.clone())?; + + let signature = keypair.sign(&tx_data_to_sign.into_inner()); + + let mut witness = Codec::new(Vec::new()); + witness.put_u8(WITNESS_SCHEME)?; + witness.put_be_u32(NONCE)?; + witness.put_bytes(signature.as_bytes())?; + let witnesses = witness.into_inner(); + + Ok((inputs, witnesses)) +} + +/// compose encrypted vote and proof in bytes 
+pub fn compose_encrypted_vote_part( + ciphertexts: Vec<Ciphertext>, + proof: ProofOfCorrectVote, +) -> Result<(Vec<u8>, Vec<u8>), Box<dyn error::Error>> { + let mut encrypted_vote = Codec::new(Vec::new()); + + let size_element = ciphertexts.iter().len(); + for cipher in ciphertexts.iter() { + encrypted_vote.put_bytes(&cipher.to_bytes())?; + } + + let encrypted_bytes = encrypted_vote.into_inner(); + + // prepend with SIZE-ELEMENT-8BIT + let mut encrypted_vote = Codec::new(Vec::new()); + encrypted_vote.put_u8(size_element as u8)?; + encrypted_vote.put_bytes(encrypted_bytes.as_slice())?; + + let mut proof_bytes = Codec::new(Vec::new()); + + for announcement in proof.announcments_group_elements() { + proof_bytes.put_bytes(&announcement.to_bytes())?; + } + + for cipher in proof.ds() { + proof_bytes.put_bytes(&cipher.to_bytes())?; + } + + for response in proof.zwvs() { + proof_bytes.put_bytes(&response.to_bytes())?; + } + + proof_bytes.put_bytes(&proof.r().to_bytes())?; + + // prepend with SIZE-ELEMENT-8BIT + let mut proof_vote = Codec::new(Vec::new()); + proof_vote.put_u8(proof.len() as u8)?; + proof_vote.put_bytes(proof_bytes.into_inner().as_slice())?; + + let mut proof = Codec::new(Vec::new()); + + proof.put_bytes(&proof_vote.into_inner())?; + + Ok((proof.into_inner(), encrypted_vote.into_inner())) +} + +#[cfg(test)] +mod tests { + + use chain_addr::{AddressReadable, Discrimination}; + + use chain_impl_mockchain::{fragment::Fragment, transaction::InputEnum}; + use chain_ser::{deser::DeserializeFromSlice, packer::Codec}; + + use ed25519_dalek::Keypair; + use rand_core::OsRng; + + use chain_vote::{ + Ciphertext, Crs, ElectionPublicKey, MemberCommunicationKey, MemberState, ProofOfCorrectVote, + }; + + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + + use crate::fragment::{compose_encrypted_vote_part, generate_vote_fragment}; + use jormungandr_lib::interfaces::AccountIdentifier; + + #[test] + fn fragment_generation() { + let mut csprng = OsRng; + + // User key for signing witness + let keypair = Keypair::generate(&mut csprng); + + let pk = keypair.public.as_bytes().clone(); + + println!("Secret key: {}", hex::encode(keypair.secret.as_bytes())); + println!("Public key: {}", hex::encode(keypair.public.as_bytes())); + + let mut rng = ChaCha20Rng::from_seed([0u8; 32]); + + // vote plan id + let vote_plan_id = + "36ad42885189a0ac3438cdb57bc8ac7f6542e05a59d1f2e4d1d38194c9d4ac7b".to_owned(); + + // election public key + let ek = create_election_pub_key(vote_plan_id.clone(), rng.clone()); + + println!("election public key {:?}", hex::encode(ek.to_bytes())); + + // vote + let vote = chain_vote::Vote::new(2, 1 as usize).unwrap(); + + let crs = chain_vote::Crs::from_hash(&hex::decode(vote_plan_id.as_bytes()).unwrap()); + + let (ciphertexts, proof) = ek.encrypt_and_prove_vote(&mut rng, &crs, vote); + let (proof, encrypted_vote) = + compose_encrypted_vote_part(ciphertexts.clone(), proof).unwrap(); + + // generate fragment + let fragment_bytes = generate_vote_fragment( + keypair, + encrypted_vote, + proof, + 5, + &hex::decode(vote_plan_id.clone()).unwrap(), + 0, + 0, + ) + .unwrap(); + + println!( + "generated fragment: {:?} size:{:?}", + hex::encode(fragment_bytes.clone()), + fragment_bytes.len() + ); + + let fragment = Fragment::deserialize_from_slice(&mut Codec::new(&fragment_bytes)).unwrap(); + + if let Fragment::VoteCast(tx) = fragment.clone() { + let _fragment_id = fragment.hash(); + + let input = tx.as_slice().inputs().iter().next().unwrap().to_enum(); + let caster = if let InputEnum::AccountInput(account_id, _value) = input {
AccountIdentifier::from(account_id).into_address(Discrimination::Production, "ca") + } else { + panic!("unhandled input "); + }; + let certificate = tx.as_slice().payload().into_payload(); + + let voting_key_61824_format = AddressReadable::from_string("ca", &caster.to_string()) + .unwrap() + .to_address(); + + let voting_key = voting_key_61824_format.public_key().unwrap().to_string(); + + assert_eq!(voting_key, hex::encode(pk)); + assert_eq!(certificate.proposal_index(), 5); + assert_eq!(certificate.vote_plan().to_string(), vote_plan_id); + } + } + + fn create_election_pub_key(shared_string: String, mut rng: ChaCha20Rng) -> ElectionPublicKey { + let h = Crs::from_hash(shared_string.as_bytes()); + let mc1 = MemberCommunicationKey::new(&mut rng); + let mc = [mc1.to_public()]; + let threshold = 1; + let m1 = MemberState::new(&mut rng, threshold, &h, &mc, 0); + let participants = vec![m1.public_key()]; + let ek = ElectionPublicKey::from_participants(&participants); + ek + } + + #[test] + fn generate_keys_from_bytes() { + let pk = hex::decode( + "ac247e6cbc2106a8858d67a9b6aa9fc6105a2f42abfd8d269f4096488b7e5d81".to_string(), + ) + .unwrap(); + + let mut sk = hex::decode( + "40cc7f02e04324b63a4db949854d5f24c9041a2bebe9b42064ff868071d1d72d".to_string(), + ) + .unwrap(); + + sk.extend(pk.clone()); + let keys = sk.clone(); + let keypair: Keypair = Keypair::from_bytes(&keys).unwrap(); + + assert_eq!(hex::encode(keypair.public.as_bytes()), hex::encode(pk)); + + println!("Secret key: {}", hex::encode(keypair.secret.as_bytes())); + println!("Public key: {}", hex::encode(keypair.public.as_bytes())); + } + + #[test] + fn encrypted_vote_generation() { + let mut rng = ChaCha20Rng::from_seed([0u8; 32]); + + // vote plan id + let vote_plan_id = + "36ad42885189a0ac3438cdb57bc8ac7f6542e05a59d1f2e4d1d38194c9d4ac7b".to_owned(); + + let shared_string = vote_plan_id.to_owned(); + + // election public key + let ek = create_election_pub_key(shared_string, rng.clone()); + + let vote = chain_vote::Vote::new(2, 1 as usize).unwrap(); + let crs = chain_vote::Crs::from_hash(vote_plan_id.as_bytes()); + + let (ciphertexts, proof) = ek.encrypt_and_prove_vote(&mut rng, &crs, vote); + let (proof, mut enc_vote) = + compose_encrypted_vote_part(ciphertexts.clone(), proof).unwrap(); + + // remove size element, size element is 2 meaning there are two ciphertexts + enc_vote.remove(0); + // each ciphertext consists of two 32 byte group elements + let (cipher_a, cipher_b) = enc_vote.split_at(64); + + let _cipher_a = Ciphertext::from_bytes(cipher_a).unwrap(); + let _cipher_b = Ciphertext::from_bytes(cipher_b).unwrap(); + + let mut msg = Codec::new(proof.as_slice()); + + let p = ProofOfCorrectVote::from_buffer(&mut msg).unwrap(); + + assert_eq!(p.len(), 1); + } +} diff --git a/src/sign/src/main.rs b/src/sign/src/main.rs new file mode 100644 index 0000000000..0f5d2a94ac --- /dev/null +++ b/src/sign/src/main.rs @@ -0,0 +1,94 @@ +//! +//! Fragment generator +//!
+ +use bech32::Error as Bech32Error; +use bech32::FromBase32; +use chain_vote::ElectionPublicKey; +use clap::Parser; +use color_eyre::Result; +use rand::SeedableRng; +use rand_chacha::ChaCha20Rng; + +use ed25519_dalek::*; +use std::error::Error; + +use crate::fragment::{compose_encrypted_vote_part, generate_vote_fragment}; + +pub mod fragment; +pub mod network; + +/// +/// Args defines and declares CLI behaviour within the context of clap +/// +#[derive(Parser, Debug, Clone)] +#[clap(about, version, author)] +pub struct Args { + /// Election public key issued by Trent + #[clap(short, long)] + pub election_pub_key: String, + /// Public key of Alice + #[clap(short, long)] + public_key: String, + /// Private key of Alice + #[clap(short, long)] + private_key: String, + /// proposal to vote on + #[clap(short, long)] + proposal: u8, + /// Epoch + #[clap(short, long)] + epoch: u32, + /// Slot + #[clap(short, long)] + slot: u32, + /// vote plan hash + #[clap(short, long)] + vote_plan_id: String, +} + +fn main() -> Result<(), Box<dyn Error>> { + color_eyre::install()?; + + let args = Args::parse(); + let mut rng = ChaCha20Rng::from_seed([0u8; 32]); + + let pk = hex::decode(args.public_key)?; + let mut sk = hex::decode(args.private_key)?; + + // Election pub key published as a Bech32-encoded address + // which consists of 3 parts: A Human-Readable Part (HRP) + Separator + Data: + let (_hrp, data, _variant) = + bech32::decode(&args.election_pub_key).map_err(Bech32Error::from)?; + + let election_pk = Vec::<u8>::from_base32(&data).map_err(Bech32Error::from)?; + + // join sk+pk together, api requirement + sk.extend(pk.clone()); + let keypair: Keypair = Keypair::from_bytes(&sk)?; + let vote = chain_vote::Vote::new(2, 1_usize)?; + // common reference string + let crs = chain_vote::Crs::from_hash(&hex::decode(args.vote_plan_id.clone())?); + + // parse ek key + let ek = ElectionPublicKey::from_bytes(&election_pk) + .ok_or("unable to parse election pub key".to_string())?; + + let (ciphertexts, proof) = ek.encrypt_and_prove_vote(&mut rng, &crs, vote); + let (proof, encrypted_vote) = compose_encrypted_vote_part(ciphertexts.clone(), proof)?; + + let fragment_bytes = generate_vote_fragment( + keypair, + encrypted_vote, + proof, + args.proposal, + &hex::decode(args.vote_plan_id)?, + args.epoch, + args.slot, + )?; + + // fragment in hex: output consumed as input to another program + println!("{:?}", hex::encode(fragment_bytes.clone())); + + Ok(()) +} diff --git a/src/sign/src/network.rs b/src/sign/src/network.rs new file mode 100644 index 0000000000..ab3d2042af --- /dev/null +++ b/src/sign/src/network.rs @@ -0,0 +1,142 @@ +//! +//! Test code +//! Example code on how to send a raw vote fragment +//!
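+//!
+//! The `Network` client below POSTs the raw fragment bytes as `application/octet-stream`
+//! to the node's fragment endpoint (e.g. `/api/v0/message`) and deserializes the JSON
+//! response listing accepted and rejected fragments.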
+ +use color_eyre::Result; + +use reqwest::blocking::Client; +use reqwest::header::HeaderMap; + +use reqwest::Url; +use serde::Deserialize as Deser; +use serde::Serialize as Ser; + +use reqwest::header::{HeaderValue, CONTENT_TYPE}; + +/// Node responds with yay or nay and associated metadata such as fragment id hash +#[derive(Ser, Deser, Debug)] +pub struct NodeResponse { + pub accepted: Vec<Accepted>, + pub rejected: Vec<Rejected>, +} + +/// Vote fragment rejected +#[derive(Ser, Deser, Debug)] +pub struct Rejected { + pub id: String, + pub reason: String, +} + +/// Vote fragment accepted +#[derive(Ser, Deser, Debug)] +pub struct Accepted { + pub id: String, +} + +/// Simple toy network client for sending vote fragments +pub struct Network { + pub client: Client, + /// URL for posting a signed vote fragment + /// e.g. https://core.projectcatalyst.io/api/v0/message + pub fragment_url: String, +} + +impl Network { + pub fn new(fragment_url: String) -> Self { + Self { + client: Client::new(), + fragment_url, + } + } + + /// Send a single vote fragment to the node + pub fn send_fragment( + &self, + fragment: Vec<u8>, + ) -> Result<reqwest::blocking::Response, Box<dyn std::error::Error>> { + Ok(self + .client + .post(Url::parse(&self.fragment_url)?) + .headers(self.construct_headers()) + .body(fragment) + .send()?) + } + + /// construct headers for octet-stream + pub fn construct_headers(&self) -> HeaderMap { + let mut headers = HeaderMap::new(); + headers.insert( + CONTENT_TYPE, + HeaderValue::from_static("application/octet-stream"), + ); + headers + } +} + +#[cfg(test)] +mod tests { + use crate::network::{Network, NodeResponse}; + use ed25519_dalek::Keypair; + use rand_chacha::{rand_core::SeedableRng, ChaCha20Rng}; + use rand_core::OsRng; + + use crate::fragment::{compose_encrypted_vote_part, generate_vote_fragment}; + use chain_vote::{Crs, ElectionPublicKey, MemberCommunicationKey, MemberState}; + + fn create_election_pub_key(shared_string: String, mut rng: ChaCha20Rng) -> ElectionPublicKey { + let h = Crs::from_hash(shared_string.as_bytes()); + let mc1 = MemberCommunicationKey::new(&mut rng); + let mc = [mc1.to_public()]; + let threshold = 1; + let m1 = MemberState::new(&mut rng, threshold, &h, &mc, 0); + let participants = vec![m1.public_key()]; + ElectionPublicKey::from_participants(&participants) + } + + #[test] + fn send_raw_fragment() { + let client = Network::new("https://core.dev.projectcatalyst.io/api/v0/message".to_string()); + + let mut csprng = OsRng; + + // User key for signing witness + let keypair = Keypair::generate(&mut csprng); + + let mut rng = ChaCha20Rng::from_seed([0u8; 32]); + + // vote plan id + let vote_plan_id = + "36ad42885189a0ac3438cdb57bc8ac7f6542e05a59d1f2e4d1d38194c9d4ac7b".to_owned(); + + // election public key + let ek = create_election_pub_key(vote_plan_id.clone(), rng.clone()); + + // vote + let vote = chain_vote::Vote::new(2, 1_usize).unwrap(); + + let crs = chain_vote::Crs::from_hash(&hex::decode(vote_plan_id.as_bytes()).unwrap()); + + let (ciphertexts, proof) = ek.encrypt_and_prove_vote(&mut rng, &crs, vote); + let (proof, encrypted_vote) = + compose_encrypted_vote_part(ciphertexts.clone(), proof).unwrap(); + + // generate fragment + let fragment_bytes = generate_vote_fragment( + keypair, + encrypted_vote, + proof, + 5, + &hex::decode(vote_plan_id.clone()).unwrap(), + 560, + 120, + ) + .unwrap(); + + let response = client.send_fragment(fragment_bytes).unwrap(); + + let resp_json = response.json::<NodeResponse>().unwrap(); + + println!("{:?}", resp_json); + } +} diff --git a/src/vit-servicing-station-f10/.env b/src/vit-servicing-station-f10/.env
new file mode 100644 index 0000000000..2944ef15fc --- /dev/null +++ b/src/vit-servicing-station-f10/.env @@ -0,0 +1,2 @@ +DATABASE_URL=./db/database.sqlite3 +MIGRATION_DIRECTORY=./vit-servicing-station-lib-f10/migrations diff --git a/src/vit-servicing-station-f10/.envrc b/src/vit-servicing-station-f10/.envrc new file mode 100644 index 0000000000..3550a30f2d --- /dev/null +++ b/src/vit-servicing-station-f10/.envrc @@ -0,0 +1 @@ +use flake diff --git a/src/vit-servicing-station-f10/.github/CODEOWNERS b/src/vit-servicing-station-f10/.github/CODEOWNERS new file mode 100644 index 0000000000..d59646f60e --- /dev/null +++ b/src/vit-servicing-station-f10/.github/CODEOWNERS @@ -0,0 +1,8 @@ +# DEVOPS + +/.github/action/nix-common-setup* @input-output-hk/jormugandr-devops +/.github/workflows/nix.yml @input-output-hk/jormugandr-devops +/default.nix @input-output-hk/jormugandr-devops +/flake.lock @input-output-hk/jormugandr-devops +/flake.nix @input-output-hk/jormugandr-devops +/shell.nix @input-output-hk/jormugandr-devops diff --git a/src/vit-servicing-station-f10/.github/actions/nix-common-setup/action.yml b/src/vit-servicing-station-f10/.github/actions/nix-common-setup/action.yml new file mode 100644 index 0000000000..16805fc798 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/actions/nix-common-setup/action.yml @@ -0,0 +1,18 @@ +name: Setup Nix Environment +inputs: + CACHIX_AUTH_TOKEN: + required: true + description: 'Cachix Auth Token' +runs: + using: "composite" + steps: + + - name: Installing Nix + uses: cachix/install-nix-action@v16 + with: + nix_path: nixpkgs=channel:nixpkgs-unstable + + - uses: cachix/cachix-action@v10 + with: + name: vit + authToken: '${{ inputs.CACHIX_AUTH_TOKEN }}' diff --git a/src/vit-servicing-station-f10/.github/dependabot.yml b/src/vit-servicing-station-f10/.github/dependabot.yml new file mode 100644 index 0000000000..29447d7273 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/dependabot.yml @@ -0,0 +1,13 @@ +version: 2 +updates: + + - package-ecosystem: github-actions + directory: "/" + schedule: + interval: daily + time: '00:00' + timezone: UTC + open-pull-requests-limit: 10 + commit-message: + prefix: "chore" + include: "scope" diff --git a/src/vit-servicing-station-f10/.github/workflows/api.yml b/src/vit-servicing-station-f10/.github/workflows/api.yml new file mode 100644 index 0000000000..12e6a58bf4 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/api.yml @@ -0,0 +1,21 @@ +name: API Check + +on: + pull_request: + paths: + - "doc/api/*.yaml" + +jobs: + validate: + runs-on: ubuntu-latest + name: Validate + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Check that the OpenAPI file is valid + uses: morigs/lint-api-docs-action@v1 + with: + files-glob: ./doc/api/*.yaml + ruleset-file: ./.spectral.yaml + fail-severity: warn diff --git a/src/vit-servicing-station-f10/.github/workflows/api_gh.yml b/src/vit-servicing-station-f10/.github/workflows/api_gh.yml new file mode 100644 index 0000000000..5eef0fcac1 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/api_gh.yml @@ -0,0 +1,37 @@ +name: Api gh-pages + +# Run on each push to master +on: + push: + branches: [ master ] + paths: + - 'doc/api/*.yaml' + +jobs: + api_gh: + runs-on: ubuntu-latest + name: gh-pages + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Bundle + uses: seeebiii/redoc-cli-github-action@v10 + with: + args: 'bundle doc/api/v0.yaml -o html/index.html' + + - name: Check Result + run: | + ls -al + test -f 
html/index.html || (echo "Missing index.html from previous step." && exit 1) + + - name: Deploy + uses: peaceiris/actions-gh-pages@v3 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + user_name: 'github-actions[bot]' + user_email: 'github-actions[bot]@users.noreply.github.com' + commit_message: ${{ github.event.head_commit.message }} + publish_dir: ./html + publish_branch: gh-pages # deploying branch + diff --git a/src/vit-servicing-station-f10/.github/workflows/audit.yml b/src/vit-servicing-station-f10/.github/workflows/audit.yml new file mode 100644 index 0000000000..2edc0ba292 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/audit.yml @@ -0,0 +1,30 @@ +name: Security audit +on: + push: + paths: + - Cargo.lock +jobs: + security_audit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - id: ls-crates-io-index + name: Get head commit hash of crates.io registry index + run: | + commit=$( + git ls-remote --heads https://github.com/rust-lang/crates.io-index.git master | + cut -f 1 + ) + echo "::set-output name=head::$commit" + - name: Cache cargo registry index + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/index + key: cargo-index-${{ steps.ls-crates-io-index.outputs.head }} + restore-keys: | + cargo-index- + + - uses: actions-rs/audit-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} diff --git a/src/vit-servicing-station-f10/.github/workflows/nix.yml b/src/vit-servicing-station-f10/.github/workflows/nix.yml new file mode 100644 index 0000000000..25b6da75f3 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/nix.yml @@ -0,0 +1,45 @@ +name: Nix +on: + push: + branches: + - master + - catalyst-fund* + pull_request: + +jobs: + build-server: + name: Build server + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup + uses: ./.github/actions/nix-common-setup + with: + CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }} + - name: Flake check + run: nix flake check + - name: Build + run: nix build .#vit-servicing-station-server + + build: + name: Build ${{ matrix.package }} + needs: build-server + strategy: + fail-fast: false + matrix: + package: + - cli + - lib + - tests + continue-on-error: true + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Setup + uses: ./.github/actions/nix-common-setup + with: + CACHIX_AUTH_TOKEN: ${{ secrets.CACHIX_AUTH_TOKEN }} + - name: Build + run: nix build .#vit-servicing-station-${{ matrix.package }} diff --git a/src/vit-servicing-station-f10/.github/workflows/release.yml b/src/vit-servicing-station-f10/.github/workflows/release.yml new file mode 100644 index 0000000000..1f34cc5b9f --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/release.yml @@ -0,0 +1,260 @@ +on: + push: + tags: + - 'v[0-9].*' # Release tags matching v*, i.e. 
v1.0, v20.15.10 + +name: Release + +jobs: + + create_release: + name: Create release + if: > + github.repository_owner == 'input-output-hk' + || startsWith(github.ref, 'refs/heads/ci/test/') + || startsWith(github.ref, 'refs/tags/') && contains(github.ref, '-ci-test.') + runs-on: ubuntu-latest + outputs: + version: ${{ steps.release_info.outputs.version }} + tag: ${{ steps.release_info.outputs.tag }} + date: ${{ steps.release_info.outputs.date }} + upload_url: ${{ steps.create_release.outputs.upload_url }} + steps: + - name: Checkout code + uses: actions/checkout@v3 + with: + submodules: 'true' + + - id: release_info + name: Get release information + run: python3 ./ci/release-info.py "$GITHUB_EVENT_NAME" + + - id: create_release + name: Create a draft release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + release_tag='${{ steps.release_info.outputs.tag }}' + hub release create ${{ steps.release_info.outputs.release_flags }} --draft \ + -m "Release ${{ steps.release_info.outputs.version }} (in progress)" \ + -t $GITHUB_SHA $release_tag + upload_url=$(hub release show -f '%uA' $release_tag) + echo "::set-output name=upload_url::$upload_url" + + cache_info: + name: Bootstrap cache + if: > + github.repository_owner == 'input-output-hk' + || startsWith(github.ref, 'refs/heads/ci/test/') + || startsWith(github.ref, 'refs/tags/') && contains(github.ref, '-ci-test.') + runs-on: ubuntu-latest + outputs: + crates-io-index-head: ${{ steps.ls-crates-io-index.outputs.head }} + cargo-lock-hash: ${{ steps.hash-cargo-lock.outputs.hash }} + steps: + - name: Checkout code + uses: actions/checkout@v3 + + - id: ls-crates-io-index + name: Get head commit hash of crates.io registry index + run: | + commit=$( + git ls-remote --heads https://github.com/rust-lang/crates.io-index.git master | + cut -f 1 + ) + echo "$commit" + echo "::set-output name=head::$commit" + + - id: hash-cargo-lock + name: Calculate dependency cache key + run: | + hash=$( + ci/strip-own-version-from-cargo-lock.pl Cargo.lock | + sha1sum | cut -d ' ' -f 1 + ) + echo "$hash" + echo "::set-output name=hash::$hash" + + update_deps: + name: Update dependencies + needs: cache_info + # Caches on Windows and Unix do not interop: + # https://github.com/actions/cache/issues/330#issuecomment-637701649 + strategy: + matrix: + os: [ubuntu-latest, windows-latest] + runs-on: ${{ matrix.os }} + steps: + - name: Cache cargo registry index + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/index + key: cargo-index-${{ needs.cache_info.outputs.crates-io-index-head }} + restore-keys: cargo-index- + + - id: cargo-deps + name: Cache cargo dependencies + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/cache + key: cargo-deps-${{ needs.cache_info.outputs.cargo-lock-hash }} + + - name: Check out the repository + uses: actions/checkout@v3 + with: + submodules: true + + - name: Fetch dependencies and update cargo registry + run: cargo fetch --locked + + build_assets: + name: Build assets + needs: [create_release, cache_info, update_deps] + runs-on: ${{ matrix.config.os }} + strategy: + fail-fast: false + matrix: + config: + # Linux + - {os: ubuntu-latest, target: x86_64-unknown-linux-gnu} + # Macos + - {os: macos-latest, target: x86_64-apple-darwin} + toolchain: [stable] + cross: [false] + include: + - config: {os: windows-latest, target: x86_64-pc-windows-msvc} + toolchain: stable-x86_64-pc-windows-msvc + cross: false + # Cross targets + - config: {os: ubuntu-latest, target: x86_64-unknown-linux-musl} + toolchain: stable + cross: true 
+ + steps: + + - uses: actions-rs/toolchain@v1 + with: + toolchain: ${{ matrix.toolchain }} + target: ${{ matrix.config.target }} + override: true + default: true + + - name: Checkout code + uses: actions/checkout@v3 + with: + submodules: true + + - name: Restore cargo registry index + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/index + key: cargo-index-${{ needs.cache_info.outputs.crates-io-index-head }} + + - name: Restore cargo dependencies + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/cache + key: cargo-deps-${{ needs.cache_info.outputs.cargo-lock-hash }} + + - name: Create .cargo/config.toml + shell: bash + run: | + mkdir .cargo + cat > .cargo/config.toml < Cross.toml < + --manifest-path vit-servicing-station-server/Cargo.toml + --bin vit-servicing-station-server + --verbose + --locked + --release + --target ${{ matrix.config.target }} + + - name: Build the cli binary + uses: actions-rs/cargo@v1 + env: + DATE: ${{ needs.create_release.outputs.date }} + with: + use-cross: ${{ matrix.cross }} + command: build + args: > + --manifest-path vit-servicing-station-cli/Cargo.toml + --bin vit-servicing-station-cli + --verbose + --locked + --release + --target ${{ matrix.config.target }} + + - name: Pack binaries (Unix) + if: matrix.config.os != 'windows-latest' + run: | + archive=vit-servicing-station-${{ needs.create_release.outputs.version }}-${{ matrix.config.target }}.tar.gz + tar -C ./target/${{ matrix.config.target }}/release -czvf $archive \ + vit-servicing-station-server \ + vit-servicing-station-cli + cat <> $GITHUB_ENV + RELEASE_ARCHIVE=$archive + RELEASE_CONTENT_TYPE=application/gzip + EOF + + - name: Pack binaries (Windows) + if: matrix.config.os == 'windows-latest' + run: | + $archive = "vit-servicing-station-${{ needs.create_release.outputs.version }}-${{ matrix.config.target }}.zip" + $args = @{ + Path = "./target/${{ matrix.config.target }}/release/vit-servicing-station-server.exe", + "./target/${{ matrix.config.target }}/release/vit-servicing-station-cli.exe" + DestinationPath = $archive + } + Compress-Archive @args + @" + RELEASE_ARCHIVE=$archive + RELEASE_CONTENT_TYPE=application/zip + "@ | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append + + - name: Upload binaries to the release + uses: actions/upload-release-asset@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + upload_url: ${{ needs.create_release.outputs.upload_url }} + asset_path: ./${{ env.RELEASE_ARCHIVE }} + asset_name: ${{ env.RELEASE_ARCHIVE }} + asset_content_type: ${{ env.RELEASE_CONTENT_TYPE }} + + publish_release: + name: Publish release + needs: [create_release, build_assets] + runs-on: ubuntu-latest + steps: + - name: Check out the repository + uses: actions/checkout@v3 + + - name: Publish release + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + hub release edit --draft=false \ + -m 'Release ${{ needs.create_release.outputs.version }}' \ + ${{ needs.create_release.outputs.tag }} diff --git a/src/vit-servicing-station-f10/.github/workflows/test.yml b/src/vit-servicing-station-f10/.github/workflows/test.yml new file mode 100644 index 0000000000..c7fe26cfb6 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/test.yml @@ -0,0 +1,190 @@ +on: + push: + branches: + - master + pull_request: + +name: CI + +jobs: + + update-deps: + name: Update dependencies + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - id: cargo-deps + name: Cache cargo dependencies + uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry/index 
+ ~/.cargo/registry/cache + ~/.cargo/git/db + key: cargo-deps-${{ hashFiles('**/Cargo.lock') }} + + - if: ${{ steps.cargo-deps.outputs.cache-hit != 'true' }} + id: ls-crates-io-index + name: Get head commit hash of crates.io registry index + run: | + commit=$( + git ls-remote --heads https://github.com/rust-lang/crates.io-index.git master | + cut -f 1 + ) + echo "::set-output name=head::$commit" + - if: ${{ steps.cargo-deps.outputs.cache-hit != 'true' }} + name: Cache cargo registry index + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/index + key: cargo-index-${{ steps.ls-crates-io-index.outputs.head }} + restore-keys: | + cargo-index- + + - if: ${{ steps.cargo-deps.outputs.cache-hit != 'true' }} + name: Fetch dependencies + run: cargo fetch --locked + + test: + name: Test Suite + needs: update-deps + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + runs-on: ${{ matrix.os }} + env: + CARGO_INCREMENTAL: 0 + steps: + - uses: actions/checkout@v3 + + - if: ${{ runner.os == 'Windows' }} + name: Fix up Cargo.lock hash + run: | + Get-ChildItem . -Recurse -Filter Cargo.lock | + Foreach-Object { + ((Get-Content $_.FullName) -join "`n") + "`n" | + Set-Content -NoNewline $_.FullName + } + + - name: Restore cargo dependencies + uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/git/db + key: cargo-deps-${{ hashFiles('**/Cargo.lock') }} + + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Install libsqlite3 (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt-get update + sudo apt-get install libsqlite3-dev + + - uses: actions-rs/cargo@v1 + continue-on-error: false + with: + command: build + args: --locked + - uses: actions-rs/cargo@v1 + continue-on-error: false + with: + command: test + args: --locked + + lints: + name: Rust lints + needs: update-deps + runs-on: ubuntu-latest + env: + CARGO_INCREMENTAL: 0 + steps: + - uses: actions/checkout@v3 + - uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + components: rustfmt, clippy + + - name: Run cargo fmt + uses: actions-rs/cargo@v1 + continue-on-error: false + with: + command: fmt + args: -- --check + + - name: Restore cargo dependencies + uses: actions/cache@v3 + with: + path: | + ~/.cargo/registry/index + ~/.cargo/registry/cache + ~/.cargo/git/db + key: cargo-deps-${{ hashFiles('**/Cargo.lock') }} + + - name: Run cargo clippy + uses: actions-rs/clippy-check@v1 + continue-on-error: false + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: -- --deny warnings + + test_coverage: + name: Test Coverage + needs: update-deps + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + submodules: true + + - name: Install Rust toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + toolchain: stable + override: true + + - name: Restore cargo registry index + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/index + key: cargo-index-v2-${{ needs.update_deps.outputs.crates-io-index-head }} + + - name: Restore dependency crates + uses: actions/cache@v3 + with: + path: ~/.cargo/registry/cache + key: cargo-deps-v2-${{ hashFiles('Cargo.lock') }} + + - name: Install libsqlite3 (Ubuntu) + if: matrix.os == 'ubuntu-latest' + run: | + sudo apt-get update + sudo apt-get install libsqlite3-dev + + - uses: actions-rs/cargo@v1 + continue-on-error: false + with: + command: build + args: --locked + + - name: Run cargo-tarpaulin + uses: 
actions-rs/tarpaulin@v0.1 + with: + args: '--ignore-tests --out Lcov --skip-clean --exclude-files vit-servicing-station-tests' + timeout: 600 + + - name: upload to Coveralls + uses: coverallsapp/github-action@master + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + path-to-lcov: './lcov.info' diff --git a/src/vit-servicing-station-f10/.github/workflows/update-flake-lock.yml b/src/vit-servicing-station-f10/.github/workflows/update-flake-lock.yml new file mode 100644 index 0000000000..73d8018626 --- /dev/null +++ b/src/vit-servicing-station-f10/.github/workflows/update-flake-lock.yml @@ -0,0 +1,19 @@ +name: update-flake-lock +on: + workflow_dispatch: # allows manual triggering + schedule: + - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00 + +jobs: + lockfile: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v3 + - name: Install Nix + uses: cachix/install-nix-action@v16 + with: + extra_nix_config: | + access-tokens = github.com=${{ secrets.GITHUB_TOKEN }} + - name: Update flake.lock + uses: DeterminateSystems/update-flake-lock@v8 diff --git a/src/vit-servicing-station-f10/.gitignore b/src/vit-servicing-station-f10/.gitignore new file mode 100644 index 0000000000..5d7fee8e50 --- /dev/null +++ b/src/vit-servicing-station-f10/.gitignore @@ -0,0 +1,8 @@ +book +/target +/vendor +/result* +/.idea/ +/.vscode/ +/.direnv/ +/.pre-commit-config.yaml diff --git a/src/vit-servicing-station-f10/.gitmodules b/src/vit-servicing-station-f10/.gitmodules new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/vit-servicing-station-f10/.spectral.yaml b/src/vit-servicing-station-f10/.spectral.yaml new file mode 100644 index 0000000000..4b237f38a1 --- /dev/null +++ b/src/vit-servicing-station-f10/.spectral.yaml @@ -0,0 +1,4 @@ +extends: spectral:oas +rules: + # https://github.com/stoplightio/spectral/issues/1271 + oas3-unused-component: off diff --git a/src/vit-servicing-station-f10/Earthfile b/src/vit-servicing-station-f10/Earthfile new file mode 100644 index 0000000000..310ad4ec21 --- /dev/null +++ b/src/vit-servicing-station-f10/Earthfile @@ -0,0 +1,10 @@ +VERSION 0.7 + +build: + FROM ../../+builder + RUN cargo build --locked --release -p vit-servicing-station-cli-f10 -p vit-servicing-station-server-f10 + + # Store the artifacts + SAVE ARTIFACT target/release/vit-servicing-station-cli-f10 vit-servicing-station-cli-f10 + SAVE ARTIFACT target/release/vit-servicing-station-server-f10 vit-servicing-station-server-f10 + SAVE IMAGE --cache-hint \ No newline at end of file diff --git a/src/vit-servicing-station-f10/README.md b/src/vit-servicing-station-f10/README.md new file mode 100644 index 0000000000..a7e6d5bba7 --- /dev/null +++ b/src/vit-servicing-station-f10/README.md @@ -0,0 +1,124 @@ +# VIT Servicing Station + +-------------- + +VIT as a service (VaaS) + +-------------- + + +### Building tips and tricks + +We use [`diesel`](http://diesel.rs/) for database (`sqlite3`) integration. Please refer to the [`diesel_cli` documentation](https://docs.rs/crate/diesel_cli/) to understand how to operate with migrations and setup. + +Diesel generates rust code based on a *SQL* migration script (`/migrations/*/up.sql`) when running the migration with `diesel_cli`. +Diesel code generation is configured in the `diesel.toml` file. Right now it just contains the path on where the rust code should be generated. +Currently we use only one migration and make changes to it as needed. This is due to the fact that for each fund we spin up a new servicing station instance. 
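+ +For reference, a typical local workflow with `diesel_cli` might look like the following (illustrative only; the exact commands depend on your local setup and on the `DATABASE_URL` configured in the `.env` file described below):
+
+```bash
+# install diesel_cli with sqlite support (assumes libsqlite3 is available on the system)
+cargo install diesel_cli --no-default-features --features sqlite
+# create the database pointed to by DATABASE_URL and run the migrations in /migrations
+diesel setup
+# re-run the migration after editing it
+diesel migration redo
+```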
+ +Another file to look at is the `.env` file. This file holds the environment variables used by this project's SQL configuration. +`diesel` uses the `DATABASE_URL` variable to know where it should generate the database file. + +### Running tests + +Tests are run using `cargo test`, but require the binaries to be present in `target`: + - `cargo build --all-targets --locked` (**without** `--release`) + - `cargo test` + + +### Server settings + +The server settings can be loaded via three mechanisms: **environment variables**, **command line flags** and a **JSON file**. +These configuration sources follow a priority order, from low to high: environment variables are overwritten by command line flags, and both are overwritten by the JSON file if one is provided. + +#### Env variables + +- `DATABASE_URL` -> `URL` for the database connection +- `TLS_CERT_FILE` -> Path to server X.509 certificate chain file, must be PEM-encoded and contain at least 1 item +- `TLS_PRIVATE_KEY_FILE` -> Path to server private key file, must be PKCS8 with single PEM-encoded, unencrypted key +- `CORS_ALLOWED_ORIGINS` -> Semicolon separated list of allowed `CORS` origins. For example: `https://foo.test;https://test.foo:5050` + +#### Command line flags +The command line flags can be listed using the `--help` flag when running the server: + +```bash +--address
Server binding address [default: 0.0.0.0:3030] +--allowed-origins If none provided, echos request origin [env: CORS_ALLOWED_ORIGINS=] +--block0-path block0 static file path [default: ./resources/v0/block0.bin] +--cert-file + Path to server X.509 certificate chain file, must be PEM-encoded and contain at least 1 item [env: + TLS_CERT_FILE=] +--db-url Database url [env: DATABASE_URL=] [default: ./db/database.sqlite3] +--in-settings-file Load settings from file +--log-level Application logging level +--log-output-path Output log file path +--max-age-secs If none provided, CORS responses won't be cached +--out-settings-file Dump current settings to file +--priv-key-file + Path to server private key file, must be PKCS8 with single PEM-encoded, unencrypted key [env: TLS_PK_FILE=] +``` + +Some of the flags fall back to the environment variables explained above if they are not set explicitly. +Some of them also have default values as a fallback in case neither the env variable nor the flag is set. + +#### JSON file configuration +Additionally, you can load the whole configuration from a JSON file by providing the path to that file with `--in-settings-file`. +An example of the contents of such a file looks like this: +```json +{ + "address" : "0.0.0.0:3030", + "tls" : { + "cert_file" : "./foo/bar.pem", + "priv_key_file" : "./bar/foo.pem" + }, + "cors" : { + "allowed_origins" : ["https://foo.test", "https://test.foo"], + "max_age_secs" : 60 + }, + "db_url": "./database.sqlite3", + "block0_path": "./test/bin.test", + "log" : { + "log_output_path" : "./server.log", + "log_level" : "error" + } +} +``` + +There is also an option to dump the configuration into a `JSON` file with `--out-settings-file`, providing the path to the output file. +This option dumps the configuration resulting from the defaults, any environment variables already set and any provided flags into that file. + +## CLI + +The `vit-servicing-station-cli` is an accompanying tool for interacting with parts of the ecosystem. +Right now it offers the following commands: + +### api-token + +#### generate +It is possible to generate API tokens (URL-safe base64 encoded) with a simple command. For example: +```bash +❯ ./vit-servicing-station-cli api-token generate +F-4QxU3FrbH7qg +``` + +It can be combined with two (optional) arguments: +* `--n` number of tokens to generate +* `--size` length (in **bytes**) of the tokens + +#### add +We can also add tokens to a database using the tool: + +```bash +./vit-servicing-station-cli api-token add --db-url ../../db/vit_station_new.db --tokens 1CNDAo43fo4ktQ 0wNbdTDMJCFcnw +``` + +We need to provide the URL of the database where we want them inserted (with `--db-url`) and the tokens we want to +insert (with `--tokens`, followed by the tokens). +Notice that the tokens use the same URL-safe base64 encoding as those generated by the previous command. + +**If the `--tokens` argument is not provided**, the CLI will read the tokens to insert from standard input.
+This enables the command to be piped from another command, for example: + +```bash +./vit-servicing-station-cli api-token generate --size 10 --n 10 | ./vit-servicing-station-cli api-token add --db-url ../../db/vit_station_new.db +``` diff --git a/src/vit-servicing-station-f10/ci/release-info.py b/src/vit-servicing-station-f10/ci/release-info.py new file mode 100644 index 0000000000..a2fe804078 --- /dev/null +++ b/src/vit-servicing-station-f10/ci/release-info.py @@ -0,0 +1,94 @@ +import json +import os +import re +import sys +from datetime import date +from subprocess import Popen, PIPE + +def check_version(crate): + # Checks package version for matching with the current tag reference + if ref is not None and ref != 'refs/tags/v' + str(crate[0]): + return 0 + else: + return 1 + +def print_error(crate, match): + # Print errors for packages which versions didn't match tag reference + if not match: + print( + '::error file={path}::version {version} does not match release tag {tag}' + .format(tag = ref, version = str(crate[0]), path = str(crate[1])) + ) + +def bundle_version(crates): + # Reads package versions from workspace manifest file + channel = Popen( + ['cargo', 'metadata', '--format-version=1', '--no-deps'], + stdout=PIPE + ) + + # parse json data + data = json.load(channel.stdout).get('packages') + + # read, map and assign workspace crates versions to bundle package versions + for package, _ in enumerate(data): + if data[package]['name'] in crates: + crates[data[package]['name']].append(data[package]['version']) + crates[data[package]['name']].append(data[package]['manifest_path']) + + # Checks package versions of the crates bundle for consistency with the given tag reference + consistency = list(map(check_version, list(crates.values()))) + + # Print errors for packages which versions didn't match tag reference + if not all(consistency): + list(map(print_error, list(crates.values()), consistency)) + sys.exit(1) + elif all(consistency): + version = list(crates.values())[0][0] + return version + + +event_name = sys.argv[1] + +date = date.today().strftime('%Y%m%d') + +ref = None +if event_name == 'push': + ref = os.getenv('GITHUB_REF') + if ref.startswith('refs/tags/'): + release_type = 'tagged' + elif ref == 'refs/heads/ci/test/nightly': + # emulate the nightly workflow + release_type = 'nightly' + ref = None + else: + raise ValueError('unexpected ref ' + ref) +elif event_name == 'schedule': + release_type = 'nightly' +else: + raise ValueError('unexpected event name ' + event_name) + + +# Cargo workspace crates/packages for versioning bundle +crates = { + 'vit-servicing-station-cli-f10':[], + 'vit-servicing-station-lib-f10':[], + 'vit-servicing-station-server-f10':[], + 'vit-servicing-station-tests-f10':[] +} + +version = bundle_version(crates) +release_flags = '' +if release_type == 'tagged': + tag = 'v' + version +elif release_type == 'nightly': + version = re.sub( + r'^(\d+\.\d+\.\d+)(-.*)?$', + r'\1-nightly.' + date, + version, + ) + tag = 'nightly.' 
+ date + release_flags = '--prerelease' + +for name in 'version', 'date', 'tag', 'release_type', 'release_flags': + print('::set-output name={0}::{1}'.format(name, globals()[name])) diff --git a/src/vit-servicing-station-f10/ci/strip-own-version-from-cargo-lock.pl b/src/vit-servicing-station-f10/ci/strip-own-version-from-cargo-lock.pl new file mode 100755 index 0000000000..70729f33ed --- /dev/null +++ b/src/vit-servicing-station-f10/ci/strip-own-version-from-cargo-lock.pl @@ -0,0 +1,16 @@ +#!/usr/bin/perl -p + +BEGIN { + $ln = 0; $ours = 0; +} + +if (/^\[\[package\]\]/ .. ($ln == 2)) { + if (/^name = "vit-servicing-station-.*"/) { + $ours = 1; + } else { + s/^version =.*// if $ours; + } + ++$ln; +} else { + $ln = 0; $ours = 0; +} diff --git a/src/vit-servicing-station-f10/default.nix b/src/vit-servicing-station-f10/default.nix new file mode 100644 index 0000000000..c7d0c267d2 --- /dev/null +++ b/src/vit-servicing-station-f10/default.nix @@ -0,0 +1,14 @@ +( + import + ( + let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } + ) + {src = ./.;} +) +.defaultNix diff --git a/src/vit-servicing-station-f10/diesel.toml b/src/vit-servicing-station-f10/diesel.toml new file mode 100644 index 0000000000..8f685b3408 --- /dev/null +++ b/src/vit-servicing-station-f10/diesel.toml @@ -0,0 +1,5 @@ +# For documentation on how to configure this file, +# see diesel.rs/guides/configuring-diesel-cli + +[print_schema] +file = "vit-servicing-station-lib-f10/src/db/schema.rs" diff --git a/src/vit-servicing-station-f10/doc/api/v0.yaml b/src/vit-servicing-station-f10/doc/api/v0.yaml new file mode 100644 index 0000000000..45534ea252 --- /dev/null +++ b/src/vit-servicing-station-f10/doc/api/v0.yaml @@ -0,0 +1,793 @@ +openapi: 3.0.3 + +info: + title: VIT as a Service Rest API + description: Voting Implementation Testnet Rest API v0 + version: 0.2.2 + contact: + url: "http://github.com/input-output-hk/vit-servicing-station" + +tags: + - name: fund + description: Information on treasury fund campaigns. + - name: challenge + description: Information on challenges, structuring proposals within a fund. + - name: proposal + description: Information on funding proposals. + - name: reviews + description: Information on reviews. + - name: snapshot + description: Continuous snapshot related information. + +servers: + - url: "http://localhost" + +paths: + /api/v0/fund: + get: + operationId: getCurrentFund + summary: Get available fund + tags: [fund] + description: | + Retrieves information on the current treasury fund campaign. + responses: + "200": + description: Valid response + content: + application/json: + schema: + allOf: + - $ref: "#/components/schemas/Fund" + - $ref: "#/components/schemas/NextFundInfo" + + /api/v0/fund/{id}: + get: + operationId: getFund + summary: Get fund by id + tags: [fund] + description: | + Retrieves information on the identified treasury fund campaign. + parameters: + - in: path + name: id + schema: + type: integer + required: true + responses: + "200": + description: Valid response + content: + application/json: + schema: + $ref: "#/components/schemas/Fund" + "404": + description: The requested fund was not found + + /api/v0/funds: + get: + operationId: getFunds + summary: Get list of the fund id + tags: [fund] + description: | + Get list of all the funds in the db. 
+ responses: + "200": + description: Valid response + content: + application/json: + schema: + type: array + items: + properties: + id: + type: integer + format: int32 + description: Identifier of the fund campaign. + "404": + description: The requested fund was not found + + /api/v0/proposals: + post: + operationId: getProposalsByChainInfo + summary: Get proposal by id + tags: [ proposal ] + description: | + Retrieves queried proposals. + requestBody: + description: List of voteplan id and indexes query + required: true + content: + application/json: + schema: + $ref: '#/components/schemas/ProposalsByVoteplanIdAndIndexQuery' + responses: + "200": + description: Valid response + content: + application/json: + schema: + $ref: "#/components/schemas/ProposalWithChallengeInfo" + + get: + operationId: getAllProposals + summary: Get all available proposals + tags: [proposal] + description: | + Lists all available proposals. + responses: + "200": + description: Valid response + content: + application/json: + schema: + items: + $ref: "#/components/schemas/ProposalWithChallengeInfo" + + /api/v0/proposals/{id}: + get: + operationId: getProposal + summary: Get proposal by id + tags: [proposal] + description: | + Retrieves information on the identified proposal. + parameters: + - in: path + name: id + schema: + type: integer + required: true + responses: + "200": + description: Valid response + content: + application/json: + schema: + $ref: "#/components/schemas/ProposalWithChallengeInfo" + "404": + description: The requested proposal was not found + + /api/v0/challenges: + get: + operationId: getAllChallenges + summary: Get all available challenges + tags: [challenge] + description: | + Lists all available challenges following insertion order. + responses: + "200": + description: Valid response + content: + application/json: + schema: + items: + $ref: "#/components/schemas/Challenge" + "404": + description: The requested challenge was not found + + /api/v0/challenges/{id}: + get: + operationId: getChallenge + summary: Get challenge by id + tags: [challenge] + description: | + Retrieves information on the identified challenge, + including the proposals submitted for it. + parameters: + - in: path + name: id + schema: + type: integer + required: true + responses: + "200": + description: Valid response + content: + application/json: + schema: + $ref: "#/components/schemas/ChallengeWithProposals" + "404": + description: The requested challenge was not found + + /api/v0/reviews/{proposal_id}: + get: + operationId: getProposalReviews + summary: Get reviews related to a proposal + tags: [reviews] + description: | + Retrieves advisor reviews information for the provided proposal id. 
+ parameters: + - in: path + name: proposal_id + schema: + type: integer + required: true + responses: + "200": + description: Valid response + content: + application/json: + schema: + $ref: "#/components/schemas/AdvisorReviews" + + /api/v0/snapshot/{tag}/{voting_key}: + get: + operationId: getVotingPower + summary: Get voting power by voting key + tags: [snapshot] + description: | + Get voting power by voting key + parameters: + - in: path + name: tag + schema: + type: string + required: true + - in: path + name: voting_key + schema: + type: string + required: true + responses: + "200": + description: Success + content: + application/json: + schema: + $ref: "#/components/schemas/VotingPowers" + "400": + description: Not found + + /api/v0/snapshot: + get: + operationId: getSnapshotTags + summary: Get list of available versions + tags: [snapshot] + description: | + Get list of available snapshot versions, which can be used to retrieve + voting power + responses: + "200": + description: Success + + /api/v0/admin/snapshot/{tag}: + put: + operationId: updateSnapshot + summary: Replace the snapshot data for the given tag + tags: [snapshot] + description: | + Replace the snapshot data for the given tag + parameters: + - in: path + name: tag + schema: + type: string + required: true + requestBody: + description: list of VoterHIR entries in json format + required: true + content: + application/json: + schema: + type: array + + responses: + "200": + description: Success + + /api/v0/admin/fund: + put: + operationId: putFund + summary: Update or create fund + tags: [fund] + description: | + Update or replace the fund in the db with the one provided. + parameters: + - in: path + name: id + schema: + type: integer + required: true + responses: + "200": + description: Valid response + "400": + description: The input is malformed. + + +components: + schemas: + Fund: + properties: + id: + type: integer + format: int32 + description: Identifier of the fund campaign. + fund_name: + type: string + description: Human-readable name of the fund campaign. + fund_goal: + type: string + description: Description of the campaign's goals. + voting_power_info: + type: string + deprecated: true + description: Deprecated, same as registration_snapshot_time. + voting_power_threshold: + type: integer + format: int64 + description: | + Minimal amount of funds required for a valid voter registration. + This amount is in lovelace. + rewards_info: + type: string + fund_start_time: + type: string + format: date-time + description: Date and time for the start of the current voting period. + fund_end_time: + type: string + format: date-time + description: Date and time for the end of the current voting period. + next_fund_start_time: + type: string + format: date-time + description: Date and time for the start of the next voting period. + registration_snapshot_time: + type: string + format: date-time + description: Date and time for blockchain state snapshot capturing voter registrations. + next_registration_snapshot_time: + type: string + format: date-time + description: Date and time for next blockchain state snapshot capturing voter registrations. + chain_vote_plans: + type: array + items: + $ref: "#/components/schemas/VotePlan" + description: Vote plans registered for voting in this fund campaign. + challenges: + type: array + items: + $ref: "#/components/schemas/Challenge" + description: A list of campaign challenges structuring the proposals. 
+ goals: + type: array + items: + $ref: "#/components/schemas/Goal" + description: The list of campaign goals for this fund. + insight_sharing_start: + type: string + format: date-time + proposal_submission_start: + type: string + format: date-time + refine_proposals_start: + type: string + format: date-time + finalize_proposals_start: + type: string + format: date-time + proposal_assessment_start: + type: string + format: date-time + assessment_qa_start: + type: string + format: date-time + snapshot_start: + type: string + format: date-time + voting_start: + type: string + format: date-time + voting_end: + type: string + format: date-time + tallying_end: + type: string + format: date-time + + VotePlan: + properties: + id: + type: integer + format: int32 + description: API identifier of the vote plan. + chain_voteplan_id: + type: string + format: hash + description: Blockchain ID of the vote plan transaction. + chain_vote_start_time: + type: string + format: date-time + description: Date and time for the start of voting on this vote plan. + chain_vote_end_time: + type: string + format: date-time + description: Date and time for the end of voting on this vote plan. + chain_committee_end_time: + type: string + format: date-time + description: Date and time for the end of tallying on this vote plan. + chain_voteplan_payload: + type: string + description: | + Whether the voting is done using the public or the privacy-preserving protocol. + fund_id: + type: integer + format: int32 + description: The fund ID this vote plan belongs to. + + Proposal: + properties: + internal_id: + type: integer + format: int32 + description: The API identifier for this proposal. + proposal_id: + type: string + description: Unique identifier for this proposal. + proposal_category: + type: object + properties: + category_id: + type: string + category_name: + type: string + category_description: + type: string + proposal_title: + type: string + description: Short title of the proposal. + proposal_summary: + type: string + description: Brief description of the proposal. + proposal_public_key: + type: string + format: binary + proposal_funds: + type: integer + format: int64 + description: The amount of funds requested by this proposal. + proposal_url: + type: string + description: URL to a web page with details on this proposal. + proposal_files: + type: string + proposer: + type: object + properties: + proposer_name: + type: string + description: Name of the author of the proposal. + proposer_email: + type: string + description: Email address of the author of the proposal. + proposer_url: + type: string + description: URL to a web resource with details about the author of the proposal. + chain_proposal_id: + type: string + description: Identifier of the proposal on the blockchain. + chain_proposal_index: + type: integer + format: int64 + description: Index of the proposal in the vote plan. + chain_vote_options: + description: Map of named vote options to choice indices. + type: object + chain_voteplan_id: + type: string + description: Identifier of the vote plan this proposal belongs to. + chain_vote_start_time: + type: string + format: date-time + description: Date and time for the start of voting on this proposal's vote plan. + chain_vote_end_time: + type: string + format: date-time + description: Date and time for the start of voting on this proposal's vote plan. + chain_committee_end_time: + type: string + format: date-time + description: Date and time for the end of tallying on this proposal's vote plan. 
+ chain_voteplan_payload: + type: string + description: | + Whether the voting is done using the public or the privacy-preserving protocol. + + ChallengeType: + type: string + enum: + - simple + - community-choice + + ProposalWithChallengeInfo: + discriminator: + propertyName: challenge_type + mapping: + simple: "#/components/schemas/SimpleProposal" + community-choice: "#/components/schemas/CommunityChoiceProposal" + allOf: + - $ref: "#/components/schemas/Proposal" + - type: object + properties: + fund_id: + type: integer + format: int32 + challenge_id: + type: integer + format: int32 + reviews_count: + type: integer + format: int32 + description: Total amount of individual reviews per assessor + challenge_type: + $ref: "#/components/schemas/ChallengeType" + + SimpleProposal: + allOf: + - $ref: "#/components/schemas/ProposalWithChallengeInfo" + - type: object + properties: + proposal_solution: + type: string + example: + { + "internal_id": 22, + "proposal_id": "4af0e6b3452cd4ee822b2ec1859fd57b5512f85c14875f408081aa9b796dfc6e", + "proposal_title": "Authentication for DeepFake Defense", + "proposal_summary": "Deepfake videos are dangerous.", + "proposal_solution": "We will create a cryptographic proof on Cardano that verifies videos are real by connecting their blockchain ID.", + "proposal_public_key": "Fvd8zI3DH85qnaChQE6Aymt1diMJP32LB0AdpheZh/Q=", + "proposal_funds": 12000, + "proposal_url": "http://ideascale.com/t/UM5UZBd2t", + "proposal_files_url": "", + "proposal_impact_score": 0, + "proposer": + { + "proposer_name": "Community Member", + "proposer_email": "example@vit.iohk.io", + "proposer_url": "", + "proposer_relevant_experience": "Cryptography student, website development, blockchain technologist.", + }, + "chain_proposal_id": "4af0e6b3452cd4ee822b2ec1859fd57b5512f85c14875f408081aa9b796dfc6e", + "chain_proposal_index": 0, + "chain_vote_options": { "blank": 0, "yes": 1, "no": 2 }, + "chain_voteplan_id": "b1eeb620baf1445672f6c9422481aff0f6babaf775760d187a7703027e098166", + "chain_vote_start_time": "2021-02-10T14:40:27+00:00", + "chain_vote_end_time": "2021-02-11T10:10:27+00:00", + "chain_committee_end_time": "2021-02-11T11:40:27+00:00", + "chain_voteplan_payload": "public", + "chain_vote_encryption_key": "", + "fund_id": 20, + "challenge_id": 2, + "challenge_type": "simple", + } + + CommunityChoiceProposal: + allOf: + - $ref: "#/components/schemas/ProposalWithChallengeInfo" + - type: object + properties: + proposal_brief: + type: string + proposal_importance: + type: string + proposal_goal: + type: string + proposal_metrics: + type: string + example: + { + "internal_id": 31, + "proposal_id": "494d8d685e3b195eb5610494f1721db7747df0517cb1b6a705bb3cebfef3c998", + "proposal_title": "A for ADA Cryptoalphabet 4 children", + "proposal_summary": "How to increase general awareness about Cardano and cryptocurrencies?\nHow to make fun community-building incentives?", + "proposal_brief": "A for ADA", + "proposal_importance": "We need to get them while they're young.", + "proposal_goal": "Nebulous.", + "proposal_metrics": "\\- Number of people engaged into the creation of Cryptoalphabet", + "proposal_public_key": "zqUCWwguCt6+NHYjkpvasvccuA7l2SuabE+1C0bzf3Y=", + "proposal_funds": 4800, + "proposal_url": "http://ideascale.com/t/UM5UZBd1p", + "proposal_files_url": "", + "proposal_impact_score": 133, + "proposer": + { + "proposer_name": "Community Member", + "proposer_email": "example@vit.iohk.io", + "proposer_url": "", + "proposer_relevant_experience": "", + }, + "chain_proposal_id": 
"494d8d685e3b195eb5610494f1721db7747df0517cb1b6a705bb3cebfef3c998", + "chain_proposal_index": 9, + "chain_vote_options": { "no": 2, "yes": 1, "blank": 0 }, + "chain_voteplan_id": "b1eeb620baf1445672f6c9422481aff0f6babaf775760d187a7703027e098166", + "chain_vote_start_time": "2021-02-10T14:40:27+00:00", + "chain_vote_end_time": "2021-02-11T10:10:27+00:00", + "chain_committee_end_time": "2021-02-11T11:40:27+00:00", + "chain_voteplan_payload": "public", + "chain_vote_encryption_key": "", + "fund_id": 20, + "challenge_id": 1, + "challenge_type": "community-choice", + } + + Challenge: + properties: + id: + type: integer + format: int32 + challenge_type: + $ref: "#/components/schemas/ChallengeType" + title: + type: string + description: + type: string + rewards_total: + type: integer + format: int64 + fund_id: + type: integer + format: int32 + challenge_url: + type: string + highlights: + $ref: "#/components/schemas/ChallengeHighlights" + + ChallengeWithProposals: + allOf: + - $ref: "#/components/schemas/Challenge" + - type: object + properties: + proposals: + type: array + items: + $ref: "#/components/schemas/Proposal" + + AdvisorReview: + properties: + id: + type: integer + format: i32 + proposal_id: + type: integer + format: i32 + assessor: + type: string + impact_alignment_rating_given: + $ref: "#/components/schemas/Rating" + impact_alignment_note: + type: string + feasibility_rating_given: + $ref: "#/components/schemas/Rating" + feasibility_note: + type: string + auditability_rating_given: + $ref: "#/components/schemas/Rating" + auditability_note: + type: string + ranking: + description: Measure of quality of this review according to veteran community advisors + type: string + enum: + - Excellent + - Good + - FilteredOut + - NA + + Rating: + type: integer + format: i32 + minimum: 0 + maximum: 500 + description: Rating in range [0, 500] (0 stars to 5 stars) + + AdvisorReviews: + type: array + items: + $ref: "#/components/schemas/AdvisorReview" + example: + [ + { + "id": 1, + "proposal_id": 1234, + "rating_given": 0, + "assessor": "za_assessor_432", + "note": "foo bar", + "tag": "Alignment", + }, + ] + + ChallengeHighlights: + properties: + sponsor: + type: string + + ProposalsByVoteplanIdAndIndexQuery: + type: array + items: + $ref: "#/components/schemas/ProposalVoteplanIdAndIndex" + + ProposalVoteplanIdAndIndex: + properties: + voteplan_id: + type: string + indexes: + type: array + items: + type: integer + format: i64 + + VotingPowers: + type: array + items: + $ref: "#/components/schemas/VotingPower" + example: + [ + { + "voting_power": 1000, + "voting_group": "representative", + }, + ] + + VotingPower: + properties: + voting_power: + type: integer + format: u64 + voting_group: + type: string + + NextFundInfo: + properties: + next: + properties: + id: + type: integer + format: int32 + description: Identifier of the fund campaign. + fund_name: + type: string + description: Human-readable name of the fund campaign. 
+ insight_sharing_start: + type: string + format: date-time + proposal_submission_start: + type: string + format: date-time + refine_proposals_start: + type: string + format: date-time + finalize_proposals_start: + type: string + format: date-time + proposal_assessment_start: + type: string + format: date-time + assessment_qa_start: + type: string + format: date-time + snapshot_start: + type: string + format: date-time + voting_start: + type: string + format: date-time + voting_end: + type: string + format: date-time + tallying_end: + type: string + format: date-time + + Goal: + properties: + id: + type: integer + format: int32 + goal_name: + type: string + fund_id: + type: integer + format: int32 diff --git a/src/vit-servicing-station-f10/docker/master/Dockerfile b/src/vit-servicing-station-f10/docker/master/Dockerfile new file mode 100644 index 0000000000..5652fc1738 --- /dev/null +++ b/src/vit-servicing-station-f10/docker/master/Dockerfile @@ -0,0 +1,35 @@ +# Simple dockerfile example to build a vit server + +FROM ubuntu:18.04 +LABEL MAINTAINER IOHK +LABEL description="Vit servicing station server" + +ARG PREFIX=/app +ENV ENV_PREFIX="vit_server_env" + +COPY database.db /data/database.db +COPY block0.bin /data/block0.bin + + +# prepare the environment +RUN apt-get update && \ + apt-get install -y git curl && \ + mkdir -p ${ENV_PREFIX} && \ + cd ${ENV_PREFIX} && \ + git clone --recurse-submodules https://github.com/input-output-hk/vit-servicing-station src + +#install rustup +RUN apt-get install -y build-essential pkg-config libssl-dev && \ + bash -c "curl https://sh.rustup.rs -sSf | bash -s -- -y" && \ + ~/.cargo/bin/rustup install stable && \ + ~/.cargo/bin/rustup default stable + + +# install the node and jcli from source +RUN cd ${ENV_PREFIX}/src && \ + git submodule update --init --recursive && \ + ~/.cargo/bin/cargo build --all --release --locked && \ + ~/.cargo/bin/cargo install --path vit-servicing-station-server + + +CMD ["bash", "-c", "~/.cargo/bin/vit-servicing-station-server --db-url /data/database.db --block0-path /data/block0.bin --log-output-path vit-servicing-station.log --log-level info"] diff --git a/src/vit-servicing-station-f10/docker/master/block0.bin b/src/vit-servicing-station-f10/docker/master/block0.bin new file mode 100644 index 0000000000..0e520a00e8 Binary files /dev/null and b/src/vit-servicing-station-f10/docker/master/block0.bin differ diff --git a/src/vit-servicing-station-f10/docker/master/database.db b/src/vit-servicing-station-f10/docker/master/database.db new file mode 100644 index 0000000000..f9cee38b16 Binary files /dev/null and b/src/vit-servicing-station-f10/docker/master/database.db differ diff --git a/src/vit-servicing-station-f10/docker/soak_tests/Dockerfile b/src/vit-servicing-station-f10/docker/soak_tests/Dockerfile new file mode 100644 index 0000000000..a2ef87a321 --- /dev/null +++ b/src/vit-servicing-station-f10/docker/soak_tests/Dockerfile @@ -0,0 +1,30 @@ +# Simple dockerfile example to build a vit server + +FROM ubuntu:18.04 +LABEL MAINTAINER IOHK +LABEL description="VIT server soak tests" + +ARG PREFIX=/app +ENV ENV_PREFIX="vit_server_env" + +# prepare the environment +RUN apt-get update && \ + apt-get install -y git curl && \ + mkdir -p ${ENV_PREFIX} && \ + cd ${ENV_PREFIX} && \ + git clone --recurse-submodules https://github.com/input-output-hk/vit-servicing-station src + +#install rustup +RUN apt-get install -y build-essential pkg-config libssl-dev && \ + bash -c "curl https://sh.rustup.rs -sSf | bash -s -- -y" && \ + 
~/.cargo/bin/rustup install stable && \ + ~/.cargo/bin/rustup default stable + + +# install the node and jcli from source +RUN cd ${ENV_PREFIX}/src && \ + git submodule update --init --recursive && \ + ~/.cargo/bin/cargo build --all --release --locked + +WORKDIR ${ENV_PREFIX}/src/vit-servicing-station-tests +RUN ["bash", "-c", "~/.cargo/bin/cargo test rest_load_long --features non-functional --release -- --nocapture"] diff --git a/src/vit-servicing-station-f10/flake.lock b/src/vit-servicing-station-f10/flake.lock new file mode 100644 index 0000000000..bea86409f4 --- /dev/null +++ b/src/vit-servicing-station-f10/flake.lock @@ -0,0 +1,150 @@ +{ + "nodes": { + "flake-compat": { + "flake": false, + "locked": { + "lastModified": 1650374568, + "narHash": "sha256-Z+s0J8/r907g149rllvwhb4pKi8Wam5ij0st8PwAh+E=", + "owner": "edolstra", + "repo": "flake-compat", + "rev": "b4a34015c698c7793d592d66adbab377907a2be8", + "type": "github" + }, + "original": { + "owner": "edolstra", + "repo": "flake-compat", + "type": "github" + } + }, + "flake-utils": { + "locked": { + "lastModified": 1656928814, + "narHash": "sha256-RIFfgBuKz6Hp89yRr7+NR5tzIAbn52h8vT6vXkYjZoM=", + "owner": "numtide", + "repo": "flake-utils", + "rev": "7e2a3b3dfd9af950a856d66b0a7d01e3c18aa249", + "type": "github" + }, + "original": { + "owner": "numtide", + "repo": "flake-utils", + "type": "github" + } + }, + "gitignore": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1646480205, + "narHash": "sha256-kekOlTlu45vuK2L9nq8iVN17V3sB0WWPqTTW3a2SQG0=", + "owner": "hercules-ci", + "repo": "gitignore.nix", + "rev": "bff2832ec341cf30acb3a4d3e2e7f1f7b590116a", + "type": "github" + }, + "original": { + "owner": "hercules-ci", + "repo": "gitignore.nix", + "type": "github" + } + }, + "naersk": { + "inputs": { + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1655042882, + "narHash": "sha256-9BX8Fuez5YJlN7cdPO63InoyBy7dm3VlJkkmTt6fS1A=", + "owner": "nix-community", + "repo": "naersk", + "rev": "cddffb5aa211f50c4b8750adbec0bbbdfb26bb9f", + "type": "github" + }, + "original": { + "owner": "nix-community", + "repo": "naersk", + "type": "github" + } + }, + "nixpkgs": { + "locked": { + "lastModified": 1657447684, + "narHash": "sha256-FCP9AuU1q6PE3vOeM5SFf58f/UKPBAsoSGDUGamNBbo=", + "owner": "NixOS", + "repo": "nixpkgs", + "rev": "5f43d8b088d3771274bcfb69d3c7435b1121ac88", + "type": "github" + }, + "original": { + "owner": "NixOS", + "ref": "nixos-unstable", + "repo": "nixpkgs", + "type": "github" + } + }, + "pre-commit-hooks": { + "inputs": { + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1656169028, + "narHash": "sha256-y9DRauokIeVHM7d29lwT8A+0YoGUBXV3H0VErxQeA8s=", + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "rev": "db3bd555d3a3ceab208bed48f983ccaa6a71a25e", + "type": "github" + }, + "original": { + "owner": "cachix", + "repo": "pre-commit-hooks.nix", + "type": "github" + } + }, + "root": { + "inputs": { + "flake-compat": "flake-compat", + "flake-utils": "flake-utils", + "gitignore": "gitignore", + "naersk": "naersk", + "nixpkgs": "nixpkgs", + "pre-commit-hooks": "pre-commit-hooks", + "rust-overlay": "rust-overlay" + } + }, + "rust-overlay": { + "inputs": { + "flake-utils": [ + "flake-utils" + ], + "nixpkgs": [ + "nixpkgs" + ] + }, + "locked": { + "lastModified": 1657507721, + "narHash": "sha256-FtV5D35ikz7zvhBX66Bs3VKd/GDyIVA+WQzqkR1PS0E=", + "owner": "oxalica", + "repo": "rust-overlay", + "rev": 
"1a133f54a0229af8310879eac2c4a82c0576a0b9", + "type": "github" + }, + "original": { + "owner": "oxalica", + "repo": "rust-overlay", + "type": "github" + } + } + }, + "root": "root", + "version": 7 +} diff --git a/src/vit-servicing-station-f10/flake.nix b/src/vit-servicing-station-f10/flake.nix new file mode 100644 index 0000000000..997b4b36a5 --- /dev/null +++ b/src/vit-servicing-station-f10/flake.nix @@ -0,0 +1,168 @@ +{ + inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; + inputs.flake-compat.url = "github:edolstra/flake-compat"; + inputs.flake-compat.flake = false; + inputs.flake-utils.url = "github:numtide/flake-utils"; + inputs.gitignore.url = "github:hercules-ci/gitignore.nix"; + inputs.gitignore.inputs.nixpkgs.follows = "nixpkgs"; + inputs.pre-commit-hooks.url = "github:cachix/pre-commit-hooks.nix"; + inputs.pre-commit-hooks.inputs.nixpkgs.follows = "nixpkgs"; + inputs.pre-commit-hooks.inputs.flake-utils.follows = "flake-utils"; + inputs.rust-overlay.url = "github:oxalica/rust-overlay"; + inputs.rust-overlay.inputs.flake-utils.follows = "flake-utils"; + inputs.rust-overlay.inputs.nixpkgs.follows = "nixpkgs"; + inputs.naersk.url = "github:nix-community/naersk"; + inputs.naersk.inputs.nixpkgs.follows = "nixpkgs"; + + nixConfig.extra-substituters = [ + "https://hydra.iohk.io" + "https://vit.cachix.org" + ]; + nixConfig.extra-trusted-public-keys = [ + "hydra.iohk.io:f/Ea+s+dFdN+3Y/G+FDgSq+a5NEWhJGzdjvKNGv0/EQ=" + "vit.cachix.org-1:tuLYwbnzbxLzQHHN0fvZI2EMpVm/+R7AKUGqukc6eh8=" + ]; + + outputs = { + self, + nixpkgs, + flake-compat, + flake-utils, + gitignore, + pre-commit-hooks, + rust-overlay, + naersk, + }: + flake-utils.lib.eachSystem + [ + flake-utils.lib.system.x86_64-linux + flake-utils.lib.system.aarch64-linux + ] + ( + system: let + readTOML = file: builtins.fromTOML (builtins.readFile file); + workspaceCargo = readTOML ./Cargo.toml; + + pkgs = import nixpkgs { + inherit system; + overlays = [(import rust-overlay)]; + }; + + rust = let + _rust = pkgs.rust-bin.stable.latest.default.override { + extensions = [ + "rust-src" + "rust-analysis" + "rls-preview" + "rustfmt-preview" + "clippy-preview" + ]; + }; + in + pkgs.buildEnv { + name = _rust.name; + inherit (_rust) meta; + buildInputs = [pkgs.makeWrapper]; + paths = [_rust]; + pathsToLink = ["/" "/bin"]; + # XXX: This is needed because cargo and clippy commands need to + # also be aware of other binaries in order to work properly. 
+ # https://github.com/cachix/pre-commit-hooks.nix/issues/126 + postBuild = '' + for i in $out/bin/*; do + wrapProgram "$i" --prefix PATH : "$out/bin" + done + ''; + }; + + naersk-lib = naersk.lib."${system}".override { + cargo = rust; + rustc = rust; + }; + + mkPackage = name: let + pkgCargo = readTOML ./${name}/Cargo.toml; + cargoOptions = [ + "--package" + name + ]; + in + naersk-lib.buildPackage { + root = gitignore.lib.gitignoreSource self; + + cargoBuildOptions = x: x ++ cargoOptions; + cargoTestOptions = x: x ++ cargoOptions; + + PROTOC = "${pkgs.protobuf}/bin/protoc"; + PROTOC_INCLUDE = "${pkgs.protobuf}/include"; + + nativeBuildInputs = with pkgs; [ + pkg-config + protobuf + rustfmt + ]; + + buildInputs = with pkgs; [ + openssl + ]; + }; + + workspace = + builtins.listToAttrs + ( + builtins.map + (name: { + inherit name; + value = mkPackage name; + }) + workspaceCargo.workspace.members + ); + + pre-commit = pre-commit-hooks.lib.${system}.run { + src = self; + hooks = { + alejandra = { + enable = true; + }; + rustfmt = { + enable = true; + entry = pkgs.lib.mkForce "${rust}/bin/cargo-fmt fmt -- --check --color always"; + }; + }; + }; + + warnToUpdateNix = pkgs.lib.warn "Consider updating to Nix > 2.7 to remove this warning!"; + in rec { + packages = + workspace + // { + default = workspace.vit-servicing-station-server; + }; + + devShells.default = pkgs.mkShell { + PROTOC = "${pkgs.protobuf}/bin/protoc"; + PROTOC_INCLUDE = "${pkgs.protobuf}/include"; + buildInputs = + [rust] + ++ (with pkgs; [ + pkg-config + openssl + protobuf + ]); + shellHook = + pre-commit.shellHook + + '' + echo "=== vit-servicing-station development shell ===" + echo "Info: Git hooks can be installed using \`pre-commit install\`" + ''; + }; + + checks.pre-commit = pre-commit; + + hydraJobs = packages; + + defaultPackage = warnToUpdateNix packages.default; + devShell = warnToUpdateNix devShells.default; + } + ); +} diff --git a/src/vit-servicing-station-f10/resources/snapshot/snapshot.json b/src/vit-servicing-station-f10/resources/snapshot/snapshot.json new file mode 100644 index 0000000000..d53da0455b --- /dev/null +++ b/src/vit-servicing-station-f10/resources/snapshot/snapshot.json @@ -0,0 +1,18 @@ +[ + { + "voting_key": "ed25519_pk19t8y8xl43uy99ywngpfcfsaklw76h48m965y5cszt5phmj2uv4psucdev9", + "voting_power": "1", + "group": "g1" + }, + { + "voting_key": "ed25519_pk158t34dk8qmjs0mcwgsa5hg75qrg00wl2mejgt45vkelhf22d0wwqf22u0v", + "voting_power": "3", + "group": "g1" + }, + { + "voting_key": "ed25519_pk1fht207rmf0wqfdx59n4fa4dskvqq0w88rh2hgnava0g825lspcmsctw8t2", + "voting_power": "4", + "group": "g1" + } +] + diff --git a/src/vit-servicing-station-f10/resources/tests/block0.bin b/src/vit-servicing-station-f10/resources/tests/block0.bin new file mode 100644 index 0000000000..0e520a00e8 Binary files /dev/null and b/src/vit-servicing-station-f10/resources/tests/block0.bin differ diff --git a/src/vit-servicing-station-f10/resources/tests/csvs/challenges.csv b/src/vit-servicing-station-f10/resources/tests/csvs/challenges.csv new file mode 100644 index 0000000000..839a451eb0 --- /dev/null +++ b/src/vit-servicing-station-f10/resources/tests/csvs/challenges.csv @@ -0,0 +1,4 @@ +id,title,description,rewards_total,fund_id,challenge_url +1,Challenge 1,Something,9000,-1,http://example.com/challenges/1 +2,Challenge 2,Hey hey hey,100500,-1,http://example.com/challenges/2 +3,Challenge 3,Another one,420000,-1,http://example.com/challenges/3 \ No newline at end of file diff --git 
a/src/vit-servicing-station-f10/resources/tests/csvs/funds.csv b/src/vit-servicing-station-f10/resources/tests/csvs/funds.csv new file mode 100644 index 0000000000..fd66fecc88 --- /dev/null +++ b/src/vit-servicing-station-f10/resources/tests/csvs/funds.csv @@ -0,0 +1,2 @@ +id,fund_name,fund_goal,voting_power_threshold,rewards_info,fund_start_time,fund_end_time,next_fund_start_time +-1,Fund1,Fund the future of Cardano,8000000000,2020-08-18T14:00:00Z,2020-08-10T14:18:12Z,2020-08-16T00:00:00Z,2020-08-19T00:00:00Z diff --git a/src/vit-servicing-station-f10/resources/tests/csvs/proposals.csv b/src/vit-servicing-station-f10/resources/tests/csvs/proposals.csv new file mode 100644 index 0000000000..1c37e6bb47 --- /dev/null +++ b/src/vit-servicing-station-f10/resources/tests/csvs/proposals.csv @@ -0,0 +1,21 @@ +internal_id,proposal_id,category_name,proposal_title,proposal_summary,proposal_problem,proposal_solution,proposal_public_key,proposal_funds,proposal_url,proposal_files_url,proposer_name,proposer_email,proposer_url,chain_proposal_id,chain_proposal_index,chain_vote_options,chain_voteplan_id,proposal_impact_score,proposer_relevant_experience,challenge_id +-1,16444246,Fund0 Development,Test proposal 16444246,To test the proposal process 16444246,We haven't tested proposal integration yet 16444246,Test the proposal integration process 16444246,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000246,https://iohk.submittable.com/submissions/16444246,https://iohk.submittable.com/submissions/16444246/file/0,IOHK 16444246,iohk_16444246@iohk.io,https://iohk.io,5db05d3c7bfc37f2059d24966aa6ef05cfa25b6a478dedb3b93f5dca5c57c24a,0,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,100,1 +-1,16444247,Fund0 Development,Test proposal 16444247,To test the proposal process 16444247,We haven't tested proposal integration yet 16444247,Test the proposal integration process 16444247,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000247,https://iohk.submittable.com/submissions/16444247,https://iohk.submittable.com/submissions/16444247/file/0,IOHK 16444247,iohk_16444247@iohk.io,https://iohk.io,f78a5e1b0cc558529be705d58479602ce8fe7af1b11e8d383e0b112d2d58d3fe,1,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,110,1 +-1,16444248,Fund0 Development,Test proposal 16444248,To test the proposal process 16444248,We haven't tested proposal integration yet 16444248,Test the proposal integration process 16444248,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000248,https://iohk.submittable.com/submissions/16444248,https://iohk.submittable.com/submissions/16444248/file/0,IOHK 16444248,iohk_16444248@iohk.io,https://iohk.io,72a6cc91c00e87ca769e343f81332ce1e9ea294f8451e5402c1b22fd242f983d,2,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,200,1 +-1,16444249,Fund0 Development,Test proposal 16444249,To test the proposal process 16444249,We haven't tested proposal integration yet 16444249,Test the proposal integration process 16444249,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000249,https://iohk.submittable.com/submissions/16444249,https://iohk.submittable.com/submissions/16444249/file/0,IOHK 16444249,iohk_16444249@iohk.io,https://iohk.io,62686eba02fd32a9d9c2116eea44aebe21c058f9974bd675eb379c69b565d762,3,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,150,1 +-1,16444250,Fund0 Development,Test proposal 16444250,To test the proposal process 16444250,We haven't 
tested proposal integration yet 16444250,Test the proposal integration process 16444250,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000250,https://iohk.submittable.com/submissions/16444250,https://iohk.submittable.com/submissions/16444250/file/0,IOHK 16444250,iohk_16444250@iohk.io,https://iohk.io,8ad198af5e4be530b9cd40811fdf65e00b81860664372df5918a792450db90df,4,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,250,1 +-1,16444251,Fund0 Development,Test proposal 16444251,To test the proposal process 16444251,We haven't tested proposal integration yet 16444251,Test the proposal integration process 16444251,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000251,https://iohk.submittable.com/submissions/16444251,https://iohk.submittable.com/submissions/16444251/file/0,IOHK 16444251,iohk_16444251@iohk.io,https://iohk.io,96b112275153277f0e20b240b64f5e4269a8875b3bb0f3a119ef1270024e51ae,5,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,300,1 +-1,16444252,Fund0 Development,Test proposal 16444252,To test the proposal process 16444252,We haven't tested proposal integration yet 16444252,Test the proposal integration process 16444252,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000252,https://iohk.submittable.com/submissions/16444252,https://iohk.submittable.com/submissions/16444252/file/0,IOHK 16444252,iohk_16444252@iohk.io,https://iohk.io,58b60d9c4d803c508aa34efb4c5f20582b2651d917da7d9440a294a3bd762798,6,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,350,1 +-1,16444253,Fund0 Development,Test proposal 16444253,To test the proposal process 16444253,We haven't tested proposal integration yet 16444253,Test the proposal integration process 16444253,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000253,https://iohk.submittable.com/submissions/16444253,https://iohk.submittable.com/submissions/16444253/file/0,IOHK 16444253,iohk_16444253@iohk.io,https://iohk.io,6c8ea72dadb0d4a7de6b20626f310efae5f6b6001a4a841968108d6e36f7894e,7,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,450,1 +-1,16444254,Fund0 Development,Test proposal 16444254,To test the proposal process 16444254,We haven't tested proposal integration yet 16444254,Test the proposal integration process 16444254,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000254,https://iohk.submittable.com/submissions/16444254,https://iohk.submittable.com/submissions/16444254/file/0,IOHK 16444254,iohk_16444254@iohk.io,https://iohk.io,a5282b087184d8125dbb0fadbdff24fcc61aec71f725f8fc887d02cf292091ec,8,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,500,1 +-1,16444255,Fund0 Development,Test proposal 16444255,To test the proposal process 16444255,We haven't tested proposal integration yet 16444255,Test the proposal integration process 16444255,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000255,https://iohk.submittable.com/submissions/16444255,https://iohk.submittable.com/submissions/16444255/file/0,IOHK 16444255,iohk_16444255@iohk.io,https://iohk.io,9a9f5535c61f49f339a1fb8f0d82962692d7f97905c3b06415d63e1477da5677,9,"blank,yes,no",c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,400,2 +-1,16444256,Fund0 Development,Test proposal 16444256,To test the proposal process 16444256,We haven't tested proposal integration yet 16444256,Test the proposal integration process 
16444256,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000256,https://iohk.submittable.com/submissions/16444256,https://iohk.submittable.com/submissions/16444256/file/0,IOHK 16444256,iohk_16444256@iohk.io,https://iohk.io,df2596ad616577c9047d23f106371258c98c329a662432c1e57d80092ae74e44,0,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,430,2 +-1,16444257,Fund0 Development,Test proposal 16444257,To test the proposal process 16444257,We haven't tested proposal integration yet 16444257,Test the proposal integration process 16444257,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000257,https://iohk.submittable.com/submissions/16444257,https://iohk.submittable.com/submissions/16444257/file/0,IOHK 16444257,iohk_16444257@iohk.io,https://iohk.io,d9d3544bad57f2597f55fce907adfe4ad5afe1aedc3d5480a3745a200b153708,1,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,420,2 +-1,16444258,Fund0 Development,Test proposal 16444258,To test the proposal process 16444258,We haven't tested proposal integration yet 16444258,Test the proposal integration process 16444258,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000258,https://iohk.submittable.com/submissions/16444258,https://iohk.submittable.com/submissions/16444258/file/0,IOHK 16444258,iohk_16444258@iohk.io,https://iohk.io,21381dd53499d6bd18362fcc442fb619cb10a4e52f64f42cea3b71c36aaad52e,2,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,50,2 +-1,16444259,Fund0 Development,Test proposal 16444259,To test the proposal process 16444259,We haven't tested proposal integration yet 16444259,Test the proposal integration process 16444259,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000259,https://iohk.submittable.com/submissions/16444259,https://iohk.submittable.com/submissions/16444259/file/0,IOHK 16444259,iohk_16444259@iohk.io,https://iohk.io,fafb0aa86365f86b9dbb68299b0ae4fe4b3701b323b91789ea003d0a906de094,3,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,90,2 +-1,16444260,Fund0 Development,Test proposal 16444260,To test the proposal process 16444260,We haven't tested proposal integration yet 16444260,Test the proposal integration process 16444260,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000260,https://iohk.submittable.com/submissions/16444260,https://iohk.submittable.com/submissions/16444260/file/0,IOHK 16444260,iohk_16444260@iohk.io,https://iohk.io,a0d9b97b69e1260c7ceea48634393aa8d86d60b4e4853806d296c49bcb294b61,4,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,55,2 +-1,16444261,Fund0 Development,Test proposal 16444261,To test the proposal process 16444261,We haven't tested proposal integration yet 16444261,Test the proposal integration process 16444261,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000261,https://iohk.submittable.com/submissions/16444261,https://iohk.submittable.com/submissions/16444261/file/0,IOHK 16444261,iohk_16444261@iohk.io,https://iohk.io,f918dc9d9218990307140e266e94c4e84c13a07699869f4941f578d21b5b607a,5,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,355,2 +-1,16444262,Fund0 Development,Test proposal 16444262,To test the proposal process 16444262,We haven't tested proposal integration yet 16444262,Test the proposal integration process 
16444262,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000262,https://iohk.submittable.com/submissions/16444262,https://iohk.submittable.com/submissions/16444262/file/0,IOHK 16444262,iohk_16444262@iohk.io,https://iohk.io,ff1215018c8dcd91dcc544ff7239bd40697431fcc203ef0162d4c6a04eebe814,6,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,245,3 +-1,16444263,Fund0 Development,Test proposal 16444263,To test the proposal process 16444263,We haven't tested proposal integration yet 16444263,Test the proposal integration process 16444263,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000263,https://iohk.submittable.com/submissions/16444263,https://iohk.submittable.com/submissions/16444263/file/0,IOHK 16444263,iohk_16444263@iohk.io,https://iohk.io,a5e04844875843fbd4d7914718048716174829acf5618370b6f0b6af983392a9,7,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,154,3 +-1,16444264,Fund0 Development,Test proposal 16444264,To test the proposal process 16444264,We haven't tested proposal integration yet 16444264,Test the proposal integration process 16444264,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000264,https://iohk.submittable.com/submissions/16444264,https://iohk.submittable.com/submissions/16444264/file/0,IOHK 16444264,iohk_16444264@iohk.io,https://iohk.io,417d930f57ac00c9aaafd2100a3425ff3aa26c1f36540ba270ea0d007f9715b2,8,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,234,3 +-1,16444265,Fund0 Development,Test proposal 16444265,To test the proposal process 16444265,We haven't tested proposal integration yet 16444265,Test the proposal integration process 16444265,Ae2tdPwUPEYwrazXRJVK4NgHSZCjP9kLSMrx2awgYiBH61zT8kz6u33Sije,1000265,https://iohk.submittable.com/submissions/16444265,https://iohk.submittable.com/submissions/16444265/file/0,IOHK 16444265,iohk_16444265@iohk.io,https://iohk.io,31a4ecb01eeae808323a11621173f684f64cd35b76b5fe876abfaf694095fee9,9,"blank,yes,no",ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,123,3 diff --git a/src/vit-servicing-station-f10/resources/tests/csvs/voteplans.csv b/src/vit-servicing-station-f10/resources/tests/csvs/voteplans.csv new file mode 100644 index 0000000000..d1b9fa982e --- /dev/null +++ b/src/vit-servicing-station-f10/resources/tests/csvs/voteplans.csv @@ -0,0 +1,3 @@ +id,chain_voteplan_id,chain_vote_start_time,chain_vote_end_time,chain_committee_end_time,chain_voteplan_payload,chain_vote_encryption_key,fund_id +-1,c983969a99106853cd32f972c471a01a73a22ea20a030bb4491aecfc676e9a8c,2020-08-10T14:18:12Z,2020-08-16T00:00:00Z,2020-08-19T00:00:00Z,public,,-1 +-1,ebcb5af4bc2823fa2a66f4a364c41d44e6cf118f8f3d32fa0920ed32df4632ae,2020-08-10T14:18:12Z,2020-08-16T00:00:00Z,2020-08-19T00:00:00Z,public,,-1 diff --git a/src/vit-servicing-station-f10/shell.nix b/src/vit-servicing-station-f10/shell.nix new file mode 100644 index 0000000000..e6d917316e --- /dev/null +++ b/src/vit-servicing-station-f10/shell.nix @@ -0,0 +1,14 @@ +( + import + ( + let + lock = builtins.fromJSON (builtins.readFile ./flake.lock); + in + fetchTarball { + url = "https://github.com/edolstra/flake-compat/archive/${lock.nodes.flake-compat.locked.rev}.tar.gz"; + sha256 = lock.nodes.flake-compat.locked.narHash; + } + ) + {src = ./.;} +) +.shellNix diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/Cargo.toml b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/Cargo.toml new file mode 100644 index 
0000000000..4abf74fdd8 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "vit-servicing-station-cli-f10" +version = "0.5.0" +authors = ["danielsanchezq "] +edition = "2018" +license = "MIT OR Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +base64 = "0.12.1" +time = "0.3" +csv = "1.1" +diesel = "1.4" +rand = "0.7.3" +r2d2 = "0.8" +structopt = "0.3.14" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1" +tempfile = "3.1.0" +thiserror = "1.0" +vit-servicing-station-lib-f10 = { path = "../vit-servicing-station-lib-f10" } + +[dev-dependencies] +diesel = { version = "1.4.5", features = ["sqlite", "r2d2"] } +diesel_migrations = "1.4.0" diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/api_token.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/api_token.rs new file mode 100644 index 0000000000..75515fc3b6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/api_token.rs @@ -0,0 +1,203 @@ +use crate::db_utils::{backup_db_file, restore_db_file}; +use crate::{db_utils::db_file_exists, task::ExecTask}; +use rand::Rng; +use std::collections::HashSet; +use std::io; +use structopt::StructOpt; +use thiserror::Error; +use time::{Duration, OffsetDateTime}; +use vit_servicing_station_lib_f10::{ + db::{ + load_db_connection_pool, models::api_tokens::ApiTokenData, + queries::api_tokens::insert_token_data, DbConnection, Error as DbPoolError, + }, + v0::api_token::ApiToken, +}; + +#[derive(Error, Debug)] +pub enum Error { + #[error("base64 encoded token `{token}` is not valid")] + Base64Decode { + #[source] + source: base64::DecodeError, + token: String, + }, + + #[error("Error with database")] + Db(#[from] diesel::result::Error), + + #[error("Error connecting db pool")] + DbPool(#[from] DbPoolError), + + #[error("Error connecting to db")] + DbConnection(#[from] r2d2::Error), + + #[error(transparent)] + Io(#[from] std::io::Error), +} + +#[derive(Debug, PartialEq, StructOpt)] +pub enum ApiTokenCmd { + /// Add provided tokens to database. If --tokens is not provided the binary will read them from the `stdin` + Add { + /// List of tokens in URL safe base64. If --tokens is not provided the binary will read them from the `stdin` + #[structopt(long = "tokens")] + tokens: Option>, + + /// URL of the vit-servicing-station database to interact with + #[structopt(long = "db-url")] + db_url: String, + }, + + /// Generate API tokens, URL safe base64 encoded. 
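(A minimal sketch of what the `generate` subcommand described here produces: `size` random bytes encoded as URL-safe base64 without padding. It mirrors the implementation that follows and assumes the same `base64` 0.12 and `rand` 0.7 APIs this crate already depends on.)

```rust
// Mirrors ApiTokenCmd::generate below: random bytes, URL-safe base64, no padding.
use rand::Rng;

fn generate_one(size: usize) -> String {
    let bytes: Vec<u8> = (0..size).map(|_| rand::thread_rng().gen::<u8>()).collect();
    base64::encode_config(bytes, base64::URL_SAFE_NO_PAD)
}

fn main() {
    let token = generate_one(10);
    // The server stores the decoded bytes, so the token must round-trip cleanly.
    let raw = base64::decode_config(&token, base64::URL_SAFE_NO_PAD).expect("valid token");
    assert_eq!(raw.len(), 10);
    println!("{}", token);
}
```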
+ Generate { + /// Number of tokens to generate + #[structopt(long = "n", default_value = "1")] + n: usize, + + /// Size of the token + #[structopt(long = "size", default_value = "10")] + size: usize, + }, +} + +impl ApiTokenCmd { + fn generate(n: usize, size: usize) -> Vec { + (0..n) + .map(|_| { + let random_bytes: Vec = + (0..size).map(|_| rand::thread_rng().gen::()).collect(); + base64::encode_config(random_bytes, base64::URL_SAFE_NO_PAD) + }) + .collect() + } + + fn add_tokens_from_stream(db_conn: &DbConnection) -> Result<(), Error> { + let mut base64_tokens: Vec = Vec::new(); + let mut input = String::new(); + while let Ok(n) = io::stdin().read_line(&mut input) { + if n == 0 { + break; + } + // pop the trailing `\n` + input.pop(); + base64_tokens.push(input.clone()); + } + ApiTokenCmd::add_tokens(&base64_tokens, db_conn) + } + + fn add_tokens(base64_tokens: &[String], db_conn: &DbConnection) -> Result<(), Error> { + // filter duplicated tokens + let base64_tokens: HashSet = base64_tokens.iter().cloned().collect(); + for base64_token in base64_tokens { + let token = + base64::decode_config(&base64_token, base64::URL_SAFE_NO_PAD).map_err(|e| { + Error::Base64Decode { + source: e, + token: base64_token, + } + })?; + let api_token_data = ApiTokenData { + token: ApiToken::new(token), + creation_time: OffsetDateTime::now_utc().unix_timestamp(), + expire_time: (OffsetDateTime::now_utc() + Duration::days(365)).unix_timestamp(), + }; + insert_token_data(api_token_data, db_conn).map_err(Error::Db)?; + } + Ok(()) + } + + fn handle_api_token_add(tokens: &Option>, db_url: &str) -> Result<(), Error> { + // check if db file exists + db_file_exists(db_url)?; + + let pool = load_db_connection_pool(db_url).map_err(Error::DbPool)?; + let db_conn = pool.get()?; + + match tokens { + // if not tokens are provided then listen to stdin for input ones + None => ApiTokenCmd::add_tokens_from_stream(&db_conn), + // process the provided tokens + Some(tokens) => ApiTokenCmd::add_tokens(tokens, &db_conn), + } + } + + fn handle_api_token_add_whith_db_backup( + tokens: &Option>, + db_url: &str, + ) -> Result<(), Error> { + let backup_file = backup_db_file(db_url)?; + if let Err(e) = Self::handle_api_token_add(tokens, db_url) { + restore_db_file(backup_file, db_url)?; + Err(e) + } else { + Ok(()) + } + } + + fn handle_generate(n: usize, size: usize) { + let tokens = ApiTokenCmd::generate(n, size); + for token in tokens { + println!("{}", token); + } + } +} + +impl ExecTask for ApiTokenCmd { + type ResultValue = (); + type Error = Error; + + fn exec(&self) -> Result<(), Error> { + match self { + ApiTokenCmd::Add { tokens, db_url } => { + ApiTokenCmd::handle_api_token_add_whith_db_backup(tokens, db_url) + } + ApiTokenCmd::Generate { n, size } => { + ApiTokenCmd::handle_generate(*n, *size); + Ok(()) + } + } + } +} + +#[cfg(test)] +mod test { + use super::*; + use vit_servicing_station_lib_f10::db::{ + load_db_connection_pool, migrations::initialize_db_with_migration, + queries::api_tokens::query_token_data_by_token, + }; + + #[test] + fn generate_token() { + let size = 10; + let n = 10; + let tokens = ApiTokenCmd::generate(n, size); + assert_eq!(tokens.len(), n); + tokens.iter().for_each(|token| { + assert_eq!( + base64::decode_config(token, base64::URL_SAFE_NO_PAD) + .unwrap() + .len(), + size + ) + }) + } + + #[test] + fn add_token() { + let tokens = ApiTokenCmd::generate(10, 10); + let connection_pool = load_db_connection_pool("").unwrap(); + initialize_db_with_migration(&connection_pool.get().unwrap()); + let 
db_conn = connection_pool.get().unwrap(); + ApiTokenCmd::add_tokens(&tokens, &db_conn).unwrap(); + for token in tokens + .iter() + .map(|t| base64::decode_config(t, base64::URL_SAFE_NO_PAD).unwrap()) + { + assert!(query_token_data_by_token(token.as_ref(), &db_conn) + .unwrap() + .is_some()); + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/app.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/app.rs new file mode 100644 index 0000000000..48e8a0a84c --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/app.rs @@ -0,0 +1,39 @@ +use crate::api_token::{ApiTokenCmd, Error as ApiTokenError}; +use crate::csv::loaders::{CsvDataCmd, Error as CsvDataError}; +use crate::init_db::{Db, Error as DbError}; +use crate::task::ExecTask; +use structopt::StructOpt; +use thiserror::Error; + +#[derive(Error, Debug)] +pub enum Error { + #[error(transparent)] + ApiTokenCmd(#[from] ApiTokenError), + #[error(transparent)] + CsvData(#[from] CsvDataError), + #[error(transparent)] + Db(#[from] DbError), +} + +#[derive(StructOpt)] +pub enum CliApp { + /// API token related operations + ApiToken(ApiTokenCmd), + /// CSV data loaders + CsvData(CsvDataCmd), + /// DB related operations + Db(Db), +} + +impl ExecTask for CliApp { + type ResultValue = (); + type Error = Error; + fn exec(&self) -> Result { + match self { + CliApp::ApiToken(api_token) => api_token.exec()?, + CliApp::CsvData(csv_data) => csv_data.exec()?, + CliApp::Db(db_cmd) => db_cmd.exec()?, + }; + Ok(()) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/loaders.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/loaders.rs new file mode 100644 index 0000000000..8c848503c0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/loaders.rs @@ -0,0 +1,277 @@ +use crate::db_utils::{backup_db_file, restore_db_file}; +use crate::{db_utils::db_file_exists, task::ExecTask}; +use csv::Trim; +use serde::de::DeserializeOwned; +use std::convert::TryInto; +use std::io; +use std::path::{Path, PathBuf}; +use structopt::StructOpt; +use thiserror::Error; +use vit_servicing_station_lib_f10::db::models::goals::InsertGoal; +use vit_servicing_station_lib_f10::db::models::proposals::{ + community_choice, simple, ProposalChallengeInfo, +}; +use vit_servicing_station_lib_f10::db::{ + load_db_connection_pool, + models::{funds::Fund, proposals::Proposal, voteplans::Voteplan}, +}; + +#[derive(Error, Debug)] +pub enum Error { + #[error(transparent)] + IoError(#[from] std::io::Error), + + #[error("Invalid Fund Data: {0}")] + InvalidFundData(String), +} + +#[derive(Debug, PartialEq, StructOpt)] +pub enum CsvDataCmd { + /// Load Funds, Voteplans and Proposals information into a SQLite3 ready file DB. + Load { + /// URL of the vit-servicing-station database to interact with + #[structopt(long = "db-url")] + db_url: String, + + /// Path to the csv containing funds information + /// At the moment, it's required these are ordered. + /// + /// Also the first fund being the current one, which means previous funds should not be + /// included. This restriction may be lifted in the future. 
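(A self-contained sketch of what that ordering requirement means for the `--funds` option declared next: the first record is taken as the current fund and the remaining records as additional funds. The trimmed-down `FundRow` struct and the inline CSV are illustrative only; the real `Fund` model lives in `vit-servicing-station-lib-f10`, but the reader settings match the loader below.)

```rust
// Illustrative only: how the loader's csv settings behave and why fund order matters.
use csv::Trim;
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct FundRow {
    id: i32,
    fund_name: String,
    fund_goal: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = "id, fund_name, fund_goal\n\
                -1, Fund1, Fund the future of Cardano\n\
                -2, Fund0, Previous fund (should normally be omitted)\n";

    let mut reader = csv::ReaderBuilder::new()
        .flexible(true)
        .has_headers(true)
        .trim(Trim::All) // same trimming the loader uses, so padded cells still parse
        .from_reader(data.as_bytes());

    let mut funds = reader.deserialize::<FundRow>();
    // First record is treated as the current fund; the rest are inserted afterwards.
    let current: FundRow = funds.next().expect("at least one fund")?;
    println!("current fund: {} ({})", current.fund_name, current.id);
    for fund in funds {
        println!("additional fund: {:?}", fund?);
    }
    Ok(())
}
```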
+ #[structopt(long = "funds")] + funds: PathBuf, + + /// Path to the csv containing voteplans information + #[structopt(long = "voteplans")] + voteplans: PathBuf, + + /// Path to the csv containing proposals information + #[structopt(long = "proposals")] + proposals: PathBuf, + + /// Path to the csv containing challenges information + #[structopt(long = "challenges")] + challenges: PathBuf, + + /// Path to the csv containing advisor reviews information + #[structopt(long = "reviews")] + reviews: PathBuf, + + /// Path to the csv containing goals information + #[structopt(long = "goals")] + goals: PathBuf, + }, +} + +impl CsvDataCmd { + fn load_from_csv(csv_path: &Path) -> io::Result> { + let mut reader = csv::ReaderBuilder::new() + .flexible(true) + .has_headers(true) + .quoting(true) + .quote(b'"') + .trim(Trim::All) + .from_path(csv_path)?; + let mut results = Vec::new(); + for record in reader.deserialize() { + match record { + Ok(data) => { + results.push(data); + } + Err(e) => { + return Err(io::Error::new( + io::ErrorKind::InvalidInput, + format!( + "Error in file {}.\nCause:\n\t{}", + csv_path.to_string_lossy(), + e + ), + )) + } + } + } + Ok(results) + } + + fn handle_load( + db_url: &str, + funds_path: &Path, + voteplans_path: &Path, + proposals_path: &Path, + challenges_path: &Path, + reviews_path: &Path, + goals_path: &Path, + ) -> Result<(), Error> { + db_file_exists(db_url)?; + let funds = CsvDataCmd::load_from_csv::(funds_path)?; + + let mut voteplans = CsvDataCmd::load_from_csv::(voteplans_path)?; + let mut challenges = + CsvDataCmd::load_from_csv::(challenges_path)?; + let csv_proposals = CsvDataCmd::load_from_csv::(proposals_path)?; + let reviews = CsvDataCmd::load_from_csv::(reviews_path)? + .into_iter() + .map(TryInto::try_into) + .collect::, _>>()?; + let mut goals: Vec = CsvDataCmd::load_from_csv::(goals_path)?; + + let mut proposals: Vec = Vec::new(); + let mut simple_proposals_data: Vec = Vec::new(); + let mut community_proposals_data: Vec = Vec::new(); + + for proposal in csv_proposals { + let challenge_type = challenges + .iter() + .find(|c| proposal.challenge_id == c.id) + .ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Challenge with id {} not found", proposal.challenge_id), + ) + })? 
+ .challenge_type + .clone(); + let (proposal, challenge_info) = + proposal.into_db_proposal_and_challenge_info(challenge_type)?; + match challenge_info { + ProposalChallengeInfo::Simple(simple) => simple_proposals_data + .push(simple.to_sql_values_with_proposal_id(&proposal.proposal_id)), + ProposalChallengeInfo::CommunityChoice(community_choice) => { + community_proposals_data.push( + community_choice.to_sql_values_with_proposal_id(&proposal.proposal_id), + ) + } + }; + } + + // start db connection + let pool = load_db_connection_pool(db_url) + .map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, format!("{}", e)))?; + let db_conn = pool + .get() + .map_err(|e| io::Error::new(io::ErrorKind::NotConnected, format!("{}", e)))?; + + let mut funds_iter = funds.into_iter(); + + // insert fund and retrieve fund with id + let fund = vit_servicing_station_lib_f10::db::queries::funds::insert_fund( + funds_iter + .next() + .ok_or_else(|| Error::InvalidFundData(funds_path.to_string_lossy().to_string()))?, + &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + for fund in funds_iter { + vit_servicing_station_lib_f10::db::queries::funds::insert_fund(fund, &db_conn) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + } + + // apply fund id in voteplans + for voteplan in voteplans.iter_mut() { + voteplan.fund_id = fund.id; + } + + // apply fund id in proposals + for proposal in proposals.iter_mut() { + proposal.fund_id = fund.id; + } + + // apply fund id to challenges + for challenge in challenges.iter_mut() { + challenge.fund_id = fund.id; + } + + for goal in goals.iter_mut() { + goal.fund_id = fund.id; + } + + vit_servicing_station_lib_f10::db::queries::voteplans::batch_insert_voteplans( + &voteplans, &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::proposals::batch_insert_proposals( + &proposals, &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::proposals::batch_insert_simple_challenge_data( + &simple_proposals_data, + &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::proposals::batch_insert_community_choice_challenge_data( + &community_proposals_data, + &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::challenges::batch_insert_challenges( + &challenges + .into_iter() + .map(|c| c.into_db_challenge_values()) + .collect::>(), + &db_conn, + ) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::community_advisors_reviews::batch_insert_advisor_reviews(&reviews, &db_conn) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + vit_servicing_station_lib_f10::db::queries::goals::batch_insert(goals, &db_conn) + .map_err(|e| io::Error::new(io::ErrorKind::Other, format!("{}", e)))?; + + Ok(()) + } + + fn handle_load_with_db_backup( + db_url: &str, + funds_path: &Path, + voteplans_path: &Path, + proposals_path: &Path, + challenges_path: &Path, + reviews: &Path, + goals: &Path, + ) -> Result<(), Error> { + let backup_file = backup_db_file(db_url)?; + if let Err(e) = Self::handle_load( + db_url, + funds_path, + voteplans_path, + proposals_path, + challenges_path, + reviews, + goals, + ) { + restore_db_file(backup_file, 
db_url)?; + Err(e) + } else { + Ok(()) + } + } +} + +impl ExecTask for CsvDataCmd { + type ResultValue = (); + type Error = Error; + fn exec(&self) -> Result<(), Error> { + match self { + CsvDataCmd::Load { + db_url, + funds, + voteplans, + proposals, + challenges, + reviews, + goals, + } => Self::handle_load_with_db_backup( + db_url, funds, voteplans, proposals, challenges, reviews, goals, + ), + } + } +} + +#[cfg(test)] +mod test {} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/mod.rs new file mode 100644 index 0000000000..34a5d4fda4 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/mod.rs @@ -0,0 +1,2 @@ +pub mod loaders; +mod models; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/models.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/models.rs new file mode 100644 index 0000000000..7e100bbf33 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/csv/models.rs @@ -0,0 +1,297 @@ +use diesel::{ExpressionMethods, Insertable}; +use serde::Deserialize; +use std::convert::TryInto; +use vit_servicing_station_lib_f10::db::models::challenges::{ + Challenge as DbChallenge, ChallengeHighlights, +}; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::{self, ReviewRanking}; +use vit_servicing_station_lib_f10::db::models::proposals::{ + self, community_choice, simple, Category, ChallengeType, ProposalChallengeInfo, Proposer, +}; +use vit_servicing_station_lib_f10::db::models::vote_options::VoteOptions; +use vit_servicing_station_lib_f10::db::schema::challenges; + +#[derive(Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct Challenge { + pub id: i32, + #[serde(alias = "challengeType")] + pub challenge_type: ChallengeType, + pub title: String, + pub description: String, + #[serde(alias = "rewardsTotal")] + pub rewards_total: i64, + #[serde(alias = "proposersRewards")] + pub proposers_rewards: i64, + #[serde(alias = "fundId")] + pub fund_id: i32, + #[serde(alias = "challengeUrl")] + pub challenge_url: String, + pub highlights: Option, +} + +#[derive(Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct Proposal { + #[serde(alias = "internalId")] + pub internal_id: i32, + #[serde(alias = "proposalId")] + pub proposal_id: String, + #[serde(alias = "categoryId", default = "Default::default")] + pub category_id: String, + #[serde(alias = "categoryName")] + pub category_name: String, + #[serde(alias = "categoryDescription", default = "Default::default")] + pub category_description: String, + #[serde(alias = "proposalTitle")] + pub proposal_title: String, + #[serde(alias = "proposalSummary")] + pub proposal_summary: String, + #[serde(alias = "proposalPublicKey")] + pub proposal_public_key: String, + #[serde(alias = "proposalFunds")] + pub proposal_funds: i64, + #[serde(alias = "proposalUrl")] + pub proposal_url: String, + #[serde(alias = "proposalFilesUrl")] + pub proposal_files_url: String, + #[serde(alias = "proposalImpactScore")] + pub proposal_impact_score: i64, + #[serde(alias = "proposerName")] + pub proposer_name: String, + #[serde(alias = "proposerEmail")] + pub proposer_email: String, + #[serde(alias = "proposerUrl")] + pub proposer_url: String, + #[serde(alias = "proposerRelevantExperience")] + pub proposer_relevant_experience: String, + #[serde(alias = "chainProposalId")] + #[serde(serialize_with = 
"vit_servicing_station_lib_f10::utils::serde::serialize_bin_as_str")] + #[serde( + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_string_as_bytes" + )] + pub chain_proposal_id: Vec, + #[serde(alias = "chainProposalIndex")] + pub chain_proposal_index: i64, + #[serde(alias = "chainVoteOptions")] + pub chain_vote_options: String, + #[serde(alias = "chainVoteplanId")] + pub chain_voteplan_id: String, + #[serde(alias = "chainVoteStartTime", default = "Default::default")] + #[serde( + serialize_with = "vit_servicing_station_lib_f10::utils::serde::serialize_unix_timestamp_as_rfc3339" + )] + #[serde( + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_unix_timestamp_from_rfc3339" + )] + pub chain_vote_start_time: i64, + #[serde(alias = "chainVoteEndTime", default = "Default::default")] + #[serde( + serialize_with = "vit_servicing_station_lib_f10::utils::serde::serialize_unix_timestamp_as_rfc3339" + )] + #[serde( + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_unix_timestamp_from_rfc3339" + )] + pub chain_vote_end_time: i64, + #[serde(alias = "chainCommitteeEndTime", default = "Default::default")] + #[serde( + serialize_with = "vit_servicing_station_lib_f10::utils::serde::serialize_unix_timestamp_as_rfc3339" + )] + #[serde( + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_unix_timestamp_from_rfc3339" + )] + pub chain_committee_end_time: i64, + #[serde(alias = "chainVoteplanPayload", default = "Default::default")] + pub chain_voteplan_payload: String, + #[serde(alias = "chainVoteEncryptionKey", default = "Default::default")] + pub chain_vote_encryption_key: String, + #[serde(alias = "fundId", default = "default_fund_id")] + pub fund_id: i32, + #[serde(alias = "challengeId", default = "default_challenge_id")] + pub challenge_id: i32, + #[serde(alias = "proposalSolution", default)] + proposal_solution: Option, + #[serde(alias = "proposalBrief", default)] + proposal_brief: Option, + #[serde(alias = "proposalImportance", default)] + proposal_importance: Option, + #[serde(alias = "proposalGoal", default)] + proposal_goal: Option, + #[serde(alias = "proposalMetrics", default)] + proposal_metrics: Option, +} + +fn default_fund_id() -> i32 { + -1 +} + +fn default_challenge_id() -> i32 { + -1 +} + +impl Challenge { + pub fn into_db_challenge_values( + self, + ) -> >::Values { + ( + challenges::id.eq(self.id), + challenges::challenge_type.eq(self.challenge_type.to_string()), + challenges::title.eq(self.title), + challenges::description.eq(self.description), + challenges::rewards_total.eq(self.rewards_total), + challenges::proposers_rewards.eq(self.proposers_rewards), + challenges::fund_id.eq(self.fund_id), + challenges::challenge_url.eq(self.challenge_url), + // This should always be a valid json + challenges::highlights.eq(serde_json::to_string(&self.highlights).ok()), + ) + } +} + +impl Proposal { + pub fn into_db_proposal_and_challenge_info( + self, + challenge_type: ChallengeType, + ) -> Result<(proposals::Proposal, proposals::ProposalChallengeInfo), std::io::Error> { + let proposal = proposals::Proposal { + internal_id: self.internal_id, + proposal_id: self.proposal_id, + proposal_category: Category { + category_id: self.category_id, + category_name: self.category_name, + category_description: self.category_description, + }, + proposal_title: self.proposal_title, + proposal_summary: self.proposal_summary, + proposal_public_key: self.proposal_public_key, + proposal_funds: 
self.proposal_funds, + proposal_url: self.proposal_url, + proposal_files_url: self.proposal_files_url, + proposal_impact_score: self.proposal_impact_score, + reviews_count: 0, + proposer: Proposer { + proposer_name: self.proposer_name, + proposer_email: self.proposer_email, + proposer_url: self.proposer_url, + proposer_relevant_experience: self.proposer_relevant_experience, + }, + chain_proposal_id: self.chain_proposal_id, + chain_proposal_index: self.chain_proposal_index, + chain_vote_options: VoteOptions::parse_coma_separated_value(&self.chain_vote_options), + chain_voteplan_id: self.chain_voteplan_id, + chain_vote_start_time: self.chain_vote_start_time, + chain_vote_end_time: self.chain_vote_end_time, + chain_committee_end_time: self.chain_committee_end_time, + chain_voteplan_payload: self.chain_voteplan_payload, + chain_vote_encryption_key: self.chain_vote_encryption_key, + fund_id: self.fund_id, + challenge_id: self.challenge_id, + }; + + let challenge_info = match challenge_type { + ChallengeType::Simple | ChallengeType::Native => match self.proposal_solution { + Some(proposal_solution) => { + ProposalChallengeInfo::Simple(simple::ChallengeInfo { proposal_solution }) + } + None => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + "cannot match simple proposal's challenge information fields:\ + expected a value in `proposal_solution` column, found none", + )); + } + }, + ChallengeType::CommunityChoice => { + match ( + self.proposal_brief, + self.proposal_importance, + self.proposal_goal, + self.proposal_metrics, + ) { + ( + Some(proposal_brief), + Some(proposal_importance), + Some(proposal_goal), + Some(proposal_metrics), + ) => ProposalChallengeInfo::CommunityChoice(community_choice::ChallengeInfo { + proposal_brief, + proposal_importance, + proposal_goal, + proposal_metrics, + }), + values => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!( + "cannot match community choice proposal's challenge information fields:\ + expected values in columns `proposal_brief`, `proposal_importance`, `proposal_goal`, `proposal_metrics`, found: {:?}", + values + ), + )); + } + } + } + }; + Ok((proposal, challenge_info)) + } +} + +#[derive(Deserialize)] +pub struct AdvisorReview { + id: i32, + proposal_id: i32, + assessor: String, + impact_alignment_rating_given: i32, + impact_alignment_note: String, + feasibility_rating_given: i32, + feasibility_note: String, + auditability_rating_given: i32, + auditability_note: String, + #[serde( + alias = "Excellent", + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_truthy_falsy" + )] + excellent: bool, + #[serde( + alias = "Good", + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_truthy_falsy" + )] + good: bool, + #[serde( + default, + alias = "Filtered Out", + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_truthy_falsy" + )] + filtered_out: bool, +} + +impl TryInto for AdvisorReview { + type Error = std::io::Error; + + fn try_into(self) -> Result { + Ok(community_advisors_reviews::AdvisorReview { + id: self.id, + proposal_id: self.proposal_id, + assessor: self.assessor, + feasibility_note: self.feasibility_note, + feasibility_rating_given: self.feasibility_rating_given, + impact_alignment_note: self.impact_alignment_note, + impact_alignment_rating_given: self.impact_alignment_rating_given, + auditability_note: self.auditability_note, + auditability_rating_given: self.auditability_rating_given, + ranking: match 
(self.excellent, self.good, self.filtered_out) { + (true, false, false) => ReviewRanking::Excellent, + (false, true, false) => ReviewRanking::Good, + (false, false, true) => ReviewRanking::FilteredOut, + (false, false, false) => ReviewRanking::NA, + _ => { + return Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!( + "expected one-hot encoding, found {}-{}-{}", + self.excellent, self.good, self.filtered_out + ), + )) + } + }, + }) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/db_utils.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/db_utils.rs new file mode 100644 index 0000000000..324173250f --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/db_utils.rs @@ -0,0 +1,60 @@ +use std::fs; +use std::io; +use std::io::{Read, Write}; + +pub fn db_file_exists(db_url: &str) -> io::Result<()> { + // check if db file exists + if !std::path::Path::new(db_url).exists() { + return Err(io::Error::new( + io::ErrorKind::InvalidInput, + format!("{} url does not exists", db_url), + )); + } + Ok(()) +} + +pub fn backup_db_file(db_url: &str) -> io::Result { + db_file_exists(db_url)?; + let mut tmp_file = tempfile::NamedTempFile::new()?; + let content = fs::read(db_url)?; + tmp_file.write_all(&content)?; + Ok(tmp_file) +} + +pub fn restore_db_file(backup_file: tempfile::NamedTempFile, db_url: &str) -> io::Result<()> { + let mut backup_file = backup_file.reopen()?; + let mut buff = Vec::new(); + backup_file.read_to_end(&mut buff)?; + fs::write(db_url, &buff) +} + +#[cfg(test)] +mod test { + use crate::db_utils::{backup_db_file, restore_db_file}; + use std::{fs, io}; + + #[test] + fn backup_file() -> io::Result<()> { + let file_path = "./tmp_db.db"; + let content = b"foo bar"; + let content_vec = content.to_vec(); + // create a file with some content + fs::write(file_path, content)?; + + // backup the file + let tmp_file = backup_db_file(file_path)?; + + // write nonsense in old file + fs::write(file_path, b"bar foo")?; + + // restore file and read content, hopefully is the old one + restore_db_file(tmp_file, file_path)?; + let backup_content = fs::read(file_path)?; + fs::remove_file(file_path)?; + + // check written and actual content + assert_eq!(&content_vec, &backup_content); + + Ok(()) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/init_db.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/init_db.rs new file mode 100644 index 0000000000..13b7a2c02e --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/init_db.rs @@ -0,0 +1,44 @@ +use crate::task::ExecTask; +use structopt::StructOpt; +use thiserror::Error; +use vit_servicing_station_lib_f10::db::{ + load_db_connection_pool, migrations::initialize_db_with_migration, Error as DbPoolError, +}; + +#[derive(Error, Debug)] +pub enum Error { + #[error("Error connecting db pool")] + DbPoolError(#[from] DbPoolError), + + #[error("Error connecting to db")] + DbConnectionError(#[from] r2d2::Error), +} + +#[derive(Debug, PartialEq, StructOpt)] +pub enum Db { + /// Initialize a DB with the proper migrations, DB file is created if not exists. 
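(A hedged usage sketch of what `db init` boils down to, assuming the `tempfile` dev-dependency this crate already declares: open a connection pool on the given path and run the bundled migrations, creating the SQLite file if it does not exist yet.)

```rust
// Illustrative only: the same two lib calls the Db::Init handler below uses.
use vit_servicing_station_lib_f10::db::{
    load_db_connection_pool, migrations::initialize_db_with_migration,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // A throwaway path; SQLite (and the pool) will create the file on first use.
    let dir = tempfile::tempdir()?;
    let db_path = dir.path().join("vit_station.db");
    let db_url = db_path.to_str().expect("utf-8 path");

    let pool = load_db_connection_pool(db_url)?;
    initialize_db_with_migration(&pool.get()?);
    println!("initialized {}", db_url);
    Ok(())
}
```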
+ Init { + /// URL of the vit-servicing-station database to interact with + #[structopt(long = "db-url")] + db_url: String, + }, +} + +impl Db { + fn init_with_migrations(db_url: &str) -> Result<(), Error> { + let pool = load_db_connection_pool(db_url)?; + let db_conn = pool.get()?; + initialize_db_with_migration(&db_conn); + Ok(()) + } +} + +impl ExecTask for Db { + type ResultValue = (); + type Error = Error; + fn exec(&self) -> Result { + match self { + Db::Init { db_url } => Db::init_with_migrations(db_url), + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/main.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/main.rs new file mode 100644 index 0000000000..06addc9b13 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/main.rs @@ -0,0 +1,21 @@ +mod api_token; +mod app; +mod csv; +mod db_utils; +mod init_db; +mod task; + +use app::*; +use structopt::StructOpt; +use task::ExecTask; + +fn main() { + let app = CliApp::from_args(); + match app.exec() { + Ok(()) => (), + Err(e) => { + println!("Error: {}", e); + std::process::exit(1); + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/task.rs b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/task.rs new file mode 100644 index 0000000000..a536f5f0ba --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-cli-f10/src/task.rs @@ -0,0 +1,5 @@ +pub trait ExecTask { + type ResultValue; + type Error; + fn exec(&self) -> Result<::ResultValue, ::Error>; +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/Cargo.toml b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/Cargo.toml new file mode 100644 index 0000000000..9d377d23c2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/Cargo.toml @@ -0,0 +1,43 @@ +[package] +name = "vit-servicing-station-lib-f10" +version = "0.5.0" +authors = ["danielsanchezq "] +edition = "2018" +license = "MIT OR Apache-2.0" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = "0.1.33" +base64 = "0.12.1" +time = { version = "0.3", features = ["parsing", "formatting"] } +diesel = { version = "1.4.5", features = ["sqlite", "r2d2"] } +diesel_migrations = "1.4.0" +dotenv = "0.15" +itertools = "0.9.0" +log = { version = "0.4.11", features = ["serde"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0.53" +strum = "0.21.0" +strum_macros = "0.21.1" +simplelog = "0.8.0" +structopt = "0.3.14" +thiserror = "1.0" +tokio = { version = "^1.0", features = ["macros", "signal", "rt", "fs", "sync"] } +tracing = "0.1" +tracing-futures = "0.2.4" +tracing-subscriber = "0.3" +warp = { version = "0.3", features = ["tls"] } +eccoxide = { git = "https://github.com/eugene-babichenko/eccoxide.git", branch = "fast-u64-scalar-mul", features = ["fast-u64-scalar-mul"], optional = true } +http-zipkin = "0.3.0" +notify = "5" + +# This solves building on windows when sqlite3lib is not installed or missing in the `$PATH` +# as it happens with the github actions pipeline associated to this project. 
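+# The "bundled" feature of libsqlite3-sys builds SQLite from source, so no system
+# sqlite3 library is required on the Windows build hosts.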
+[target.'cfg(windows)'.dependencies] +libsqlite3-sys = { version = "0.9.3", features = ["bundled"] } + +[dev-dependencies] +tempfile = "3" +rand = "0.8" +jormungandr-lib = { workspace = true } diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/down.sql b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/down.sql new file mode 100644 index 0000000000..e9304bf326 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/down.sql @@ -0,0 +1,11 @@ +-- This file should undo anything in `up.sql` +DROP TABLE IF EXISTS proposals; +DROP TABLE IF EXISTS funds; +DROP TABLE IF EXISTS voteplans; +DROP TABLE IF EXISTS api_tokens; +DROP TABLE IF EXISTS challenges; +DROP TABLE IF EXISTS proposal_simple_challenge; +DROP TABLE IF EXISTS proposal_community_choice_challenge; +DROP TABLE IF EXISTS community_advisors_reviews; +DROP VIEW IF EXISTS full_proposals_info; +DROP TABLE IF EXISTS goals; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/up.sql b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/up.sql new file mode 100644 index 0000000000..c2170a28ad --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/migrations/2020-05-22-112032_setup_db/up.sql @@ -0,0 +1,150 @@ +create table funds +( + id INTEGER NOT NULL + primary key autoincrement, + fund_name VARCHAR NOT NULL, + fund_goal VARCHAR NOT NULL, + registration_snapshot_time BIGINT NOT NULL, + next_registration_snapshot_time BIGINT NOT NULL, + voting_power_threshold BIGINT NOT NULL, + fund_start_time BIGINT NOT NULL, + fund_end_time BIGINT NOT NULL, + next_fund_start_time BIGINT NOT NULL, + insight_sharing_start BIGINT NOT NULL, + proposal_submission_start BIGINT NOT NULL, + refine_proposals_start BIGINT NOT NULL, + finalize_proposals_start BIGINT NOT NULL, + proposal_assessment_start BIGINT NOT NULL, + assessment_qa_start BIGINT NOT NULL, + snapshot_start BIGINT NOT NULL, + voting_start BIGINT NOT NULL, + voting_end BIGINT NOT NULL, + tallying_end BIGINT NOT NULL, + results_url VARCHAR NOT NULL, + survey_url VARCHAR NOT NULL +); + +create table proposals +( + id INTEGER NOT NULL + primary key autoincrement, + proposal_id VARCHAR NOT NULL, + proposal_category VARCHAR NOT NULL, + proposal_title VARCHAR NOT NULL, + proposal_summary VARCHAR NOT NULL, + proposal_public_key VARCHAR NOT NULL, + proposal_funds BIGINT NOT NULL, + proposal_url VARCHAR NOT NULL, + proposal_files_url VARCHAR NOT NULL, + proposal_impact_score BIGINT NOT NULL, + proposer_name VARCHAR NOT NULL, + proposer_contact VARCHAR NOT NULL, + proposer_url VARCHAR NOT NULL, + proposer_relevant_experience VARCHAR NOT NULL, + chain_proposal_id BLOB NOT NULL, + chain_proposal_index BIGINT NOT NULL, + chain_vote_options VARCHAR NOT NULL, + chain_voteplan_id VARCHAR NOT NULL, + challenge_id INTEGER NOT NULL +); + +create table proposal_simple_challenge ( + proposal_id VARCHAR NOT NULL primary key, + proposal_solution VARCHAR +); + +create table proposal_community_choice_challenge ( + proposal_id VARCHAR NOT NULL primary key, + proposal_brief VARCHAR, + proposal_importance VARCHAR, + proposal_goal VARCHAR, + proposal_metrics VARCHAR +); + +create table voteplans +( + id INTEGER NOT NULL + primary key autoincrement, + chain_voteplan_id VARCHAR NOT NULL + unique, + chain_vote_start_time BIGINT NOT NULL, 
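+    -- note: the chain_* time columns hold unix timestamps in seconds, matching the i64 fields of the Rust models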
+ chain_vote_end_time BIGINT NOT NULL, + chain_committee_end_time BIGINT NOT NULL, + chain_voteplan_payload VARCHAR NOT NULL, + chain_vote_encryption_key VARCHAR NOT NULL, + fund_id INTEGER NOT NULL +); + +create table api_tokens +( + token BLOB NOT NULL UNIQUE PRIMARY KEY , + creation_time BIGINT NOT NULL, + expire_time BIGINT NOT NULL +); + +create table challenges +( + internal_id INTEGER NOT NULL + primary key autoincrement, + id INTEGER NOT NULL UNIQUE, + challenge_type VARCHAR NOT NULL, + title VARCHAR NOT NULL, + description VARCHAR NOT NULL, + rewards_total BIGINT NOT NULL, + proposers_rewards BIGINT NOT NULL, + fund_id INTEGER NOT NULL, + challenge_url VARCHAR NOT NULL, + highlights VARCHAR +); + +create table community_advisors_reviews ( + id INTEGER NOT NULL primary key autoincrement, + proposal_id INTEGER NOT NULL, + assessor VARCHAR NOT NULL, + impact_alignment_rating_given INTEGER NOT NULL, + impact_alignment_note VARCHAR NOT NULL, + feasibility_rating_given INTEGER NOT NULL, + feasibility_note VARCHAR NOT NULL, + auditability_rating_given INTEGER NOT NULL, + auditability_note VARCHAR NOT NULL, + ranking INTEGER NOT NULL +); + +create table goals +( + id INTEGER NOT NULL + primary key autoincrement, + goal_name VARCHAR NOT NULL, + fund_id INTEGER NOT NULL, + FOREIGN KEY(fund_id) REFERENCES funds(id) +); + +CREATE VIEW full_proposals_info +AS +SELECT + proposals.*, + ifnull(reviews_count, 0) as reviews_count, + proposal_simple_challenge.proposal_solution, + proposal_community_choice_challenge.proposal_brief, + proposal_community_choice_challenge.proposal_importance, + proposal_community_choice_challenge.proposal_goal, + proposal_community_choice_challenge.proposal_metrics, + voteplans.chain_vote_start_time, + voteplans.chain_vote_end_time, + voteplans.chain_committee_end_time, + voteplans.chain_voteplan_payload, + voteplans.chain_vote_encryption_key, + voteplans.fund_id, + challenges.challenge_type +FROM + proposals + INNER JOIN voteplans ON proposals.chain_voteplan_id = voteplans.chain_voteplan_id + INNER JOIN challenges on challenges.id = proposals.challenge_id + LEFT JOIN proposal_simple_challenge + on proposals.proposal_id = proposal_simple_challenge.proposal_id + and (challenges.challenge_type = 'simple' or challenges.challenge_type = 'native') + LEFT JOIN proposal_community_choice_challenge + on proposals.proposal_id = proposal_community_choice_challenge.proposal_id + and challenges.challenge_type = 'community-choice' + LEFT JOIN (SELECT proposal_id as review_proposal_id, COUNT (DISTINCT assessor) as reviews_count FROM community_advisors_reviews GROUP BY proposal_id) + on proposals.proposal_id = review_proposal_id; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/migrations.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/migrations.rs new file mode 100644 index 0000000000..ca462202d6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/migrations.rs @@ -0,0 +1,7 @@ +use crate::db::DbConnection; + +embed_migrations!("./migrations"); + +pub fn initialize_db_with_migration(db_conn: &DbConnection) { + embedded_migrations::run(db_conn).unwrap(); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/mod.rs new file mode 100644 index 0000000000..500cc51ba1 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/mod.rs @@ -0,0 +1,37 @@ +pub mod migrations; 
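+// Diesel models, query helpers and the (views) schema definitions live in the modules below.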
+pub mod models; +pub mod queries; +pub mod schema; +pub mod views_schema; + +use diesel::r2d2::{ConnectionManager, Pool}; +use diesel::sqlite::SqliteConnection; +use diesel::Connection; + +pub type DbConnectionPool = Pool>; +pub type Error = Box; +// TODO: Right now this is forced as the current backend. But it should be abstracted so it works for any diesel::Backend +type Db = diesel::sqlite::Sqlite; +pub type DbConnection = SqliteConnection; + +// ⚠ WARNING ⚠ : This query is sqlite specific, would need to be changed if backend changes +const TEST_CONN_QUERY: &str = " +SELECT + name +FROM + sqlite_master +WHERE + type ='table' AND + name NOT LIKE 'sqlite_%'; +"; + +pub fn load_db_connection_pool(db_url: &str) -> Result { + let manager = ConnectionManager::::new(db_url); + let pool = Pool::builder().build(manager)?; + + // test db connection or bubble up error + let conn = pool.get()?; + conn.execute(TEST_CONN_QUERY)?; + + Ok(pool) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/api_tokens.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/api_tokens.rs new file mode 100644 index 0000000000..618fddcdd7 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/api_tokens.rs @@ -0,0 +1,48 @@ +use crate::db::{schema::api_tokens, Db}; +use crate::v0::api_token::ApiToken; +use diesel::{ExpressionMethods, Insertable, Queryable}; + +#[derive(Debug, Clone)] +pub struct ApiTokenData { + pub token: ApiToken, + pub creation_time: i64, + pub expire_time: i64, +} + +impl Queryable for ApiTokenData { + type Row = ( + // 0 -> token + Vec, + // 1 -> creation_time + i64, + // 2-> expire_time + i64, + ); + + fn build(row: Self::Row) -> Self { + Self { + token: ApiToken::new(row.0), + creation_time: row.1, + expire_time: row.2, + } + } +} + +// This warning is disabled here. Values is only referenced as a type here. It should be ok not to +// split the types definitions. 
+#[allow(clippy::type_complexity)] +impl Insertable for ApiTokenData { + type Values = ( + diesel::dsl::Eq>, + diesel::dsl::Eq, + diesel::dsl::Eq, + ); + + fn values(self) -> Self::Values { + ( + api_tokens::token.eq(self.token.as_ref().to_vec()), + api_tokens::creation_time.eq(self.creation_time), + api_tokens::expire_time.eq(self.expire_time), + ) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/challenges.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/challenges.rs new file mode 100644 index 0000000000..45432f8c73 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/challenges.rs @@ -0,0 +1,134 @@ +use crate::db::models::proposals::ChallengeType; +use crate::db::{schema::challenges, Db}; +use diesel::{ExpressionMethods, Insertable, Queryable}; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct ChallengeHighlights { + pub sponsor: String, +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct Challenge { + #[serde(alias = "internalId")] + // this is used only to retain the original insert order + pub internal_id: i32, + pub id: i32, + #[serde(alias = "challengeType")] + pub challenge_type: ChallengeType, + pub title: String, + pub description: String, + #[serde(alias = "rewardsTotal")] + pub rewards_total: i64, + #[serde(alias = "proposersRewards")] + pub proposers_rewards: i64, + #[serde(alias = "fundId")] + pub fund_id: i32, + #[serde(alias = "challengeUrl")] + pub challenge_url: String, + pub highlights: Option, +} + +impl Queryable for Challenge { + type Row = ( + // 0 -> internal_id + i32, + // 1 -> id + i32, + // 2 -> challenge_type + String, + // 3 -> title + String, + // 4 -> description + String, + // 5 -> rewards_total + i64, + // 6 -> proposers_rewards + i64, + // 7 -> fund_id + i32, + // 8 -> fund_url + String, + // 9 -> challenge_highlights + Option, + ); + + fn build(row: Self::Row) -> Self { + Challenge { + internal_id: row.0, + id: row.1, + challenge_type: row.2.parse().unwrap(), + title: row.3, + description: row.4, + rewards_total: row.5, + proposers_rewards: row.6, + fund_id: row.7, + challenge_url: row.8, + // It should be ensured that the content is valid json + highlights: row.9.and_then(|v| serde_json::from_str(&v).ok()), + } + } +} + +impl Insertable for Challenge { + #[allow(clippy::type_complexity)] + type Values = ( + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq>, + ); + + fn values(self) -> Self::Values { + ( + challenges::id.eq(self.id), + challenges::challenge_type.eq(self.challenge_type.to_string()), + challenges::title.eq(self.title), + challenges::description.eq(self.description), + challenges::rewards_total.eq(self.rewards_total), + challenges::proposers_rewards.eq(self.proposers_rewards), + challenges::fund_id.eq(self.fund_id), + challenges::challenge_url.eq(self.challenge_url), + // This should always be a valid json + challenges::highlights.eq(serde_json::to_string(&self.highlights).ok()), + ) + } +} + +#[cfg(test)] +pub mod test { + use super::*; + use crate::db::DbConnectionPool; + use diesel::RunQueryDsl; + + pub fn get_test_challenge_with_fund_id(fund_id: i32) -> Challenge { + const CHALLENGE_ID: i32 = 9001; + const REWARDS_TOTAL: i64 = 100500; + Challenge { + internal_id: 1, + id: CHALLENGE_ID, + challenge_type: 
ChallengeType::CommunityChoice, + title: "challenge title".to_string(), + description: "challenge description".to_string(), + rewards_total: REWARDS_TOTAL, + proposers_rewards: REWARDS_TOTAL, + fund_id, + challenge_url: "http://example.com/".to_string(), + highlights: None, + } + } + + pub fn populate_db_with_challenge(challenge: &Challenge, pool: &DbConnectionPool) { + let connection = pool.get().unwrap(); + + diesel::insert_into(challenges::table) + .values(challenge.clone().values()) + .execute(&connection) + .unwrap(); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/community_advisors_reviews.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/community_advisors_reviews.rs new file mode 100644 index 0000000000..88956bbdd3 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/community_advisors_reviews.rs @@ -0,0 +1,109 @@ +use crate::db::schema::community_advisors_reviews; + +use diesel::prelude::*; +use diesel::{ + backend::Backend, + deserialize::{self, FromSql}, + sql_types::Integer, + FromSqlRow, Insertable, Queryable, +}; +use serde::{Deserialize, Serialize}; + +#[derive(Copy, Clone, PartialEq, Eq, Debug, Serialize, FromSqlRow, Deserialize)] +pub enum ReviewRanking { + Excellent = 0, + Good = 1, + FilteredOut = 2, + NA = 3, // not reviewed by vCAs +} + +impl FromSql for ReviewRanking +where + DB: Backend, + i32: FromSql, +{ + fn from_sql(bytes: Option<&DB::RawValue>) -> deserialize::Result { + match i32::from_sql(bytes)? { + 0 => Ok(ReviewRanking::Excellent), + 1 => Ok(ReviewRanking::Good), + 2 => Ok(ReviewRanking::FilteredOut), + 3 => Ok(ReviewRanking::NA), + x => Err(format!("Unrecognized variant {}", x).into()), + } + } +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Queryable)] +pub struct AdvisorReview { + pub id: i32, + pub proposal_id: i32, + pub assessor: String, + pub impact_alignment_rating_given: i32, + pub impact_alignment_note: String, + pub feasibility_rating_given: i32, + pub feasibility_note: String, + pub auditability_rating_given: i32, + pub auditability_note: String, + pub ranking: ReviewRanking, +} + +impl Insertable for AdvisorReview { + #[allow(clippy::type_complexity)] + type Values = ( + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + ); + + fn values(self) -> Self::Values { + ( + community_advisors_reviews::proposal_id.eq(self.proposal_id), + community_advisors_reviews::assessor.eq(self.assessor), + community_advisors_reviews::impact_alignment_rating_given + .eq(self.impact_alignment_rating_given), + community_advisors_reviews::impact_alignment_note.eq(self.impact_alignment_note), + community_advisors_reviews::feasibility_rating_given.eq(self.feasibility_rating_given), + community_advisors_reviews::feasibility_note.eq(self.feasibility_note), + community_advisors_reviews::auditability_rating_given + .eq(self.auditability_rating_given), + community_advisors_reviews::auditability_note.eq(self.auditability_note), + community_advisors_reviews::ranking.eq(self.ranking as i32), + ) + } +} + +#[cfg(test)] +pub mod test { + use super::*; + use crate::db::DbConnectionPool; + use diesel::RunQueryDsl; + + pub fn get_test_advisor_review_with_proposal_id(proposal_id: i32) -> AdvisorReview { + AdvisorReview { + id: 0, + proposal_id, + assessor: "foo bar".to_string(), + impact_alignment_rating_given: 0, + 
impact_alignment_note: "impact note".to_string(), + feasibility_rating_given: 0, + feasibility_note: "feasibility note".to_string(), + auditability_rating_given: 0, + auditability_note: "auditability".to_string(), + ranking: ReviewRanking::Good, + } + } + + pub fn populate_db_with_advisor_review(review: &AdvisorReview, pool: &DbConnectionPool) { + let connection = pool.get().unwrap(); + diesel::insert_into(community_advisors_reviews::table) + .values(review.clone().values()) + .execute(&connection) + .unwrap(); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/funds.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/funds.rs new file mode 100644 index 0000000000..3016ec4f0c --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/funds.rs @@ -0,0 +1,350 @@ +use crate::db::{ + models::{challenges::Challenge, goals::Goal, voteplans::Voteplan}, + schema::funds, + Db, +}; +use diesel::{ExpressionMethods, Insertable, Queryable}; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct Fund { + #[serde(default = "Default::default")] + pub id: i32, + #[serde(alias = "fundName")] + pub fund_name: String, + #[serde(alias = "fundGoal")] + pub fund_goal: String, + #[serde(alias = "votingPowerThreshold")] + pub voting_power_threshold: i64, + #[serde(alias = "fundStartTime")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub fund_start_time: i64, + #[serde(alias = "fundEndTime")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub fund_end_time: i64, + #[serde(alias = "nextFundStartTime")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub next_fund_start_time: i64, + #[serde(alias = "registrationSnapshotTime")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub registration_snapshot_time: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub next_registration_snapshot_time: i64, + #[serde(alias = "chainVotePlans", default = "Vec::new")] + pub chain_vote_plans: Vec, + #[serde(default = "Vec::new")] + pub challenges: Vec, + #[serde(alias = "stageDates", flatten)] + pub stage_dates: FundStageDates, + #[serde(default = "Vec::new")] + pub goals: Vec, + #[serde(alias = "resultsUrl")] + pub results_url: String, + #[serde(alias = "surveyUrl")] + pub survey_url: String, +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +pub struct FundStageDates { + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub insight_sharing_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub 
proposal_submission_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub refine_proposals_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub finalize_proposals_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub proposal_assessment_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub assessment_qa_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub snapshot_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub voting_start: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub voting_end: i64, + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + pub tallying_end: i64, +} + +#[derive(Serialize)] +struct FundWithLegacyFields { + id: i32, + fund_name: String, + fund_goal: String, + voting_power_threshold: i64, + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + fund_start_time: i64, + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + fund_end_time: i64, + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + next_fund_start_time: i64, + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + registration_snapshot_time: i64, + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + next_registration_snapshot_time: i64, + chain_vote_plans: Vec, + challenges: Vec, +} + +impl From for FundWithLegacyFields { + fn from(fund: Fund) -> Self { + FundWithLegacyFields { + id: fund.id, + fund_name: fund.fund_name, + fund_goal: fund.fund_goal, + voting_power_threshold: fund.voting_power_threshold, + fund_start_time: fund.fund_start_time, + fund_end_time: fund.fund_end_time, + next_fund_start_time: fund.next_fund_start_time, + registration_snapshot_time: fund.registration_snapshot_time, + next_registration_snapshot_time: fund.next_registration_snapshot_time, + chain_vote_plans: fund.chain_vote_plans, + challenges: fund.challenges, + } + } +} + +impl Queryable for Fund { + type Row = ( + // 0 -> id + i32, + // 1 -> fund_name + String, + // 2 -> fund_goal + String, + // 3 -> registration_snapshot_time + i64, + // 4 -> next_registration_snapshot_time + i64, + // 5 -> voting_power_threshold + i64, + // 6 -> fund_start_time + i64, + // 7 -> fund_end_time + i64, + // 8 -> next_fund_start_time + i64, + // insight_sharing_start + i64, + // proposal_submission_start + i64, + // refine_proposals_start + i64, + // 
finalize_proposals_start + i64, + // proposal_assessment_start + i64, + // assessment_qa_start + i64, + // snapshot_start + i64, + // voting_start + i64, + // voting_end + i64, + // tallying_end + i64, + // results_url + String, + // survey_url + String, + ); + + fn build(row: Self::Row) -> Self { + Fund { + id: row.0, + fund_name: row.1, + fund_goal: row.2, + registration_snapshot_time: row.3, + next_registration_snapshot_time: row.4, + voting_power_threshold: row.5, + fund_start_time: row.6, + fund_end_time: row.7, + next_fund_start_time: row.8, + chain_vote_plans: vec![], + challenges: vec![], + stage_dates: FundStageDates { + insight_sharing_start: row.9, + proposal_submission_start: row.10, + refine_proposals_start: row.11, + finalize_proposals_start: row.12, + proposal_assessment_start: row.13, + assessment_qa_start: row.14, + snapshot_start: row.15, + voting_start: row.16, + voting_end: row.17, + tallying_end: row.18, + }, + goals: vec![], + results_url: row.19, + survey_url: row.20, + } + } +} + +// This warning is disabled here. Values is only referenced as a type here. It should be ok not to +// split the types definitions. +#[allow(clippy::type_complexity)] +impl Insertable for Fund { + type Values = ( + Option>, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + ); + + fn values(self) -> Self::Values { + let id_item = if self.id == 0 { + None + } else { + Some(funds::id.eq(self.id)) + }; + ( + id_item, + funds::fund_name.eq(self.fund_name), + funds::fund_goal.eq(self.fund_goal), + funds::registration_snapshot_time.eq(self.registration_snapshot_time), + funds::next_registration_snapshot_time.eq(self.next_registration_snapshot_time), + funds::voting_power_threshold.eq(self.voting_power_threshold), + funds::fund_start_time.eq(self.fund_start_time), + funds::fund_end_time.eq(self.fund_end_time), + funds::next_fund_start_time.eq(self.next_fund_start_time), + funds::insight_sharing_start.eq(self.stage_dates.insight_sharing_start), + funds::proposal_submission_start.eq(self.stage_dates.proposal_submission_start), + funds::refine_proposals_start.eq(self.stage_dates.refine_proposals_start), + funds::finalize_proposals_start.eq(self.stage_dates.finalize_proposals_start), + funds::proposal_assessment_start.eq(self.stage_dates.proposal_assessment_start), + funds::assessment_qa_start.eq(self.stage_dates.assessment_qa_start), + funds::snapshot_start.eq(self.stage_dates.snapshot_start), + funds::voting_start.eq(self.stage_dates.voting_start), + funds::voting_end.eq(self.stage_dates.voting_end), + funds::tallying_end.eq(self.stage_dates.tallying_end), + funds::results_url.eq(self.results_url), + funds::survey_url.eq(self.survey_url), + ) + } +} + +#[cfg(test)] +pub mod test { + use crate::db::{ + models::{ + challenges::test as challenges_testing, + funds::{Fund, FundStageDates}, + goals::{Goal, InsertGoal}, + voteplans::test as voteplans_testing, + }, + schema::{funds, goals}, + DbConnectionPool, + }; + + use diesel::{Insertable, RunQueryDsl}; + use time::{Duration, OffsetDateTime}; + + pub fn get_test_fund(fund_id: Option) -> Fund { + const FUND_ID: i32 = 42; + let fund_id = fund_id.unwrap_or(FUND_ID); + + Fund { + id: fund_id, + fund_name: "hey oh let's 
go".to_string(), + fund_goal: "test this endpoint".to_string(), + registration_snapshot_time: (OffsetDateTime::now_utc() + Duration::days(3)) + .unix_timestamp(), + next_registration_snapshot_time: (OffsetDateTime::now_utc() + Duration::days(30)) + .unix_timestamp(), + voting_power_threshold: 100, + fund_start_time: OffsetDateTime::now_utc().unix_timestamp(), + fund_end_time: OffsetDateTime::now_utc().unix_timestamp(), + next_fund_start_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_vote_plans: vec![voteplans_testing::get_test_voteplan_with_fund_id(fund_id)], + challenges: vec![challenges_testing::get_test_challenge_with_fund_id(fund_id)], + stage_dates: FundStageDates { + insight_sharing_start: OffsetDateTime::now_utc().unix_timestamp(), + proposal_submission_start: OffsetDateTime::now_utc().unix_timestamp(), + refine_proposals_start: OffsetDateTime::now_utc().unix_timestamp(), + finalize_proposals_start: OffsetDateTime::now_utc().unix_timestamp(), + proposal_assessment_start: OffsetDateTime::now_utc().unix_timestamp(), + assessment_qa_start: OffsetDateTime::now_utc().unix_timestamp(), + snapshot_start: OffsetDateTime::now_utc().unix_timestamp(), + voting_start: OffsetDateTime::now_utc().unix_timestamp(), + voting_end: OffsetDateTime::now_utc().unix_timestamp(), + tallying_end: OffsetDateTime::now_utc().unix_timestamp(), + }, + goals: vec![Goal { + id: 1, + goal_name: "goal1".into(), + fund_id, + }], + results_url: format!("http://localhost/fund/{FUND_ID}/results/"), + survey_url: format!("http://localhost/fund/{FUND_ID}/survey/"), + } + } + + pub fn populate_db_with_fund(fund: &Fund, pool: &DbConnectionPool) { + let values = fund.clone().values(); + + // Warning! mind this scope: r2d2 pooled connection behaviour depend of the scope. Looks like + // if the connection is not out of scope, when giving the reference to the next function + // call below it creates a wrong connection (where there are not tables even if they were loaded). 
+ { + let connection = pool.get().unwrap(); + diesel::insert_into(funds::table) + .values(values) + .execute(&connection) + .unwrap(); + } + + for voteplan in &fund.chain_vote_plans { + voteplans_testing::populate_db_with_voteplan(voteplan, pool); + } + + for challenge in &fund.challenges { + challenges_testing::populate_db_with_challenge(challenge, pool); + } + + { + let connection = pool.get().unwrap(); + for goal in &fund.goals { + diesel::insert_into(goals::table) + .values(InsertGoal::from(goal)) + .execute(&connection) + .unwrap(); + } + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/goals.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/goals.rs new file mode 100644 index 0000000000..6c51c76e33 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/goals.rs @@ -0,0 +1,37 @@ +use crate::db::schema::goals; +use diesel::{Insertable, Queryable}; +use serde::{Deserialize, Serialize}; + +pub use goals_impl::InsertGoal; + +#[derive(Serialize, Deserialize, Queryable, Clone, Debug, PartialEq, Eq)] +#[diesel(table_name = goals)] +pub struct Goal { + pub id: i32, + #[serde(alias = "goalName")] + pub goal_name: String, + #[serde(alias = "fundId")] + pub fund_id: i32, +} + +mod goals_impl { + #![allow(clippy::extra_unused_lifetimes)] + + use super::*; + + #[derive(Deserialize, Insertable, Clone, Debug)] + #[table_name = "goals"] + pub struct InsertGoal { + pub goal_name: String, + pub fund_id: i32, + } +} + +impl From<&Goal> for InsertGoal { + fn from(g: &Goal) -> Self { + Self { + goal_name: g.goal_name.clone(), + fund_id: g.fund_id, + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/mod.rs new file mode 100644 index 0000000000..91e360617a --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/mod.rs @@ -0,0 +1,8 @@ +pub mod api_tokens; +pub mod challenges; +pub mod community_advisors_reviews; +pub mod funds; +pub mod goals; +pub mod proposals; +pub mod vote_options; +pub mod voteplans; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals.rs new file mode 100644 index 0000000000..b5d7d2b50d --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals.rs @@ -0,0 +1,534 @@ +use super::vote_options; +use crate::db::models::vote_options::VoteOptions; +use crate::db::{schema::proposals, views_schema::full_proposals_info, Db}; +use diesel::{ExpressionMethods, Insertable, Queryable}; +use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; +use std::convert::{TryFrom, TryInto}; + +pub mod community_choice; +pub mod simple; + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct Category { + #[serde(alias = "categoryId", default = "Default::default")] + pub category_id: String, + #[serde(alias = "categoryName")] + pub category_name: String, + #[serde(alias = "categoryDescription", default = "Default::default")] + pub category_description: String, +} + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct Proposer { + #[serde(alias = "proposerName")] + pub proposer_name: String, + #[serde(alias = "proposerEmail")] + pub proposer_email: String, + #[serde(alias = "proposerUrl")] + pub 
proposer_url: String, + #[serde(alias = "proposerRelevantExperience")] + pub proposer_relevant_experience: String, +} + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)] +#[serde(rename_all = "kebab-case")] +pub enum ChallengeType { + Simple, + CommunityChoice, + Native, +} + +impl std::str::FromStr for ChallengeType { + type Err = std::io::Error; + + fn from_str(s: &str) -> Result { + match s { + "simple" => Ok(ChallengeType::Simple), + "community-choice" => Ok(ChallengeType::CommunityChoice), + "native" => Ok(ChallengeType::Native), + s => Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!( + "Expected any of [simple | community-choice | native], found: {}", + s + ), + )), + } + } +} + +impl std::fmt::Display for ChallengeType { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + // should be implemented and safe to unwrap here + let repr = serde_json::to_string(&self).unwrap(); + write!(f, "{}", repr.trim_matches('"')) + } +} + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct Proposal { + #[serde(alias = "internalId")] + pub internal_id: i32, + #[serde(alias = "proposalId")] + pub proposal_id: String, + #[serde(alias = "category")] + pub proposal_category: Category, + #[serde(alias = "proposalTitle")] + pub proposal_title: String, + #[serde(alias = "proposalSummary")] + pub proposal_summary: String, + #[serde(alias = "proposalPublicKey")] + pub proposal_public_key: String, + #[serde(alias = "proposalFunds")] + pub proposal_funds: i64, + #[serde(alias = "proposalUrl")] + pub proposal_url: String, + #[serde(alias = "proposalFilesUrl")] + pub proposal_files_url: String, + #[serde(alias = "proposalImpactScore")] + pub proposal_impact_score: i64, + pub proposer: Proposer, + #[serde(alias = "chainProposalId")] + #[serde(serialize_with = "crate::utils::serde::serialize_bin_as_str")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_string_as_bytes")] + pub chain_proposal_id: Vec, + #[serde(alias = "chainProposalIndex")] + pub chain_proposal_index: i64, + #[serde(alias = "chainVoteOptions")] + pub chain_vote_options: VoteOptions, + #[serde(alias = "chainVoteplanId")] + pub chain_voteplan_id: String, + #[serde(alias = "chainVoteStartTime", default = "Default::default")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_vote_start_time: i64, + #[serde(alias = "chainVoteEndTime", default = "Default::default")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_vote_end_time: i64, + #[serde(alias = "chainCommitteeEndTime", default = "Default::default")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_committee_end_time: i64, + #[serde(alias = "chainVoteplanPayload")] + pub chain_voteplan_payload: String, + #[serde(alias = "chainVoteEncryptionKey")] + pub chain_vote_encryption_key: String, + #[serde(alias = "fundId")] + pub fund_id: i32, + #[serde(alias = "challengeId")] + pub challenge_id: i32, + #[serde(alias = "reviewsCount")] + pub reviews_count: i32, +} + +#[derive(PartialEq, Eq, Debug, Clone)] +pub enum ProposalChallengeInfo { + Simple(simple::ChallengeInfo), + 
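+    // Community-choice challenges carry brief/importance/goal/metrics fields,
+    // while simple and native challenges only record a proposal solution.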
CommunityChoice(community_choice::ChallengeInfo), +} + +#[derive(Serialize, Deserialize)] +struct SerdeProposalChallengeInfo { + #[serde(flatten, default, skip_serializing_if = "Option::is_none")] + simple: Option, + #[serde(flatten, default, skip_serializing_if = "Option::is_none")] + community: Option, +} + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct FullProposalInfo { + #[serde(flatten)] + pub proposal: Proposal, + #[serde(flatten)] + pub challenge_info: ProposalChallengeInfo, + #[serde(alias = "challengeType")] + pub challenge_type: ChallengeType, +} + +impl Serialize for ProposalChallengeInfo { + fn serialize(&self, serializer: S) -> Result<::Ok, ::Error> + where + S: Serializer, + { + let serde_data: SerdeProposalChallengeInfo = self.clone().into(); + serde_data.serialize(serializer) + } +} + +impl<'de> Deserialize<'de> for ProposalChallengeInfo { + fn deserialize(deserializer: D) -> Result>::Error> + where + D: Deserializer<'de>, + { + let serde_data: SerdeProposalChallengeInfo = + SerdeProposalChallengeInfo::deserialize(deserializer)?; + serde_data.try_into().map_err(|_| { + >::Error::custom("Invalid data for ProposalChallengeInfo") + }) + } +} + +type FullProposalsInfoRow = ( + // 0 ->id + i32, + // 1 -> proposal_id + String, + // 2-> category_name + String, + // 3 -> proposal_title + String, + // 4 -> proposal_summary + String, + // 5 -> proposal_public_key + String, + // 6 -> proposal_funds + i64, + // 7 -> proposal_url + String, + // 8 -> proposal_files_url, + String, + // 9 -> proposal_impact_score + i64, + // 10 -> proposer_name + String, + // 11 -> proposer_contact + String, + // 12 -> proposer_url + String, + // 13 -> proposer_relevant_experience + String, + // 14 -> chain_proposal_id + Vec, + // 15 -> chain_proposal_index + i64, + // 16 -> chain_vote_options + String, + // 17 -> chain_voteplan_id + String, + // 18 -> chain_vote_starttime + i64, + // 19 -> chain_vote_endtime + i64, + // 20 -> chain_committee_end_time + i64, + // 21 -> chain_voteplan_payload + String, + // 22 -> chain_vote_encryption_key + String, + // 23 -> fund_id + i32, + // 24 -> challenge_id + i32, + // 25 -> reviews_count + i32, + // 26 -> challenge_type + String, + // 27 -> proposal_solution + Option, + // 28 -> proposal_brief + Option, + // 29 -> proposal_importance + Option, + // 30 -> proposal_goal + Option, + // 31 -> proposal_metrics + Option, +); + +impl Queryable for Proposal { + type Row = FullProposalsInfoRow; + + fn build(row: Self::Row) -> Self { + Proposal { + internal_id: row.0, + proposal_id: row.1, + proposal_category: Category { + category_id: "".to_string(), + category_name: row.2, + category_description: "".to_string(), + }, + proposal_title: row.3, + proposal_summary: row.4, + proposal_public_key: row.5, + proposal_funds: row.6, + proposal_url: row.7, + proposal_files_url: row.8, + proposal_impact_score: row.9, + proposer: Proposer { + proposer_name: row.10, + proposer_email: row.11, + proposer_url: row.12, + proposer_relevant_experience: row.13, + }, + chain_proposal_id: row.14, + chain_proposal_index: row.15, + chain_vote_options: vote_options::VoteOptions::parse_coma_separated_value(&row.16), + chain_voteplan_id: row.17, + chain_vote_start_time: row.18, + chain_vote_end_time: row.19, + chain_committee_end_time: row.20, + chain_voteplan_payload: row.21, + chain_vote_encryption_key: row.22, + fund_id: row.23, + challenge_id: row.24, + reviews_count: row.25, + } + } +} + +impl Queryable for FullProposalInfo { + type Row = FullProposalsInfoRow; + + fn 
build(row: Self::Row) -> Self { + let challenge_type = row.26.parse().unwrap(); + // It should be safe to unwrap this values here if DB is sanitized and hence tables have data + // relative to the challenge type. + let challenge_info = match challenge_type { + ChallengeType::Simple | ChallengeType::Native => { + ProposalChallengeInfo::Simple(simple::ChallengeInfo { + proposal_solution: row.27.clone().unwrap(), + }) + } + ChallengeType::CommunityChoice => { + ProposalChallengeInfo::CommunityChoice(community_choice::ChallengeInfo { + proposal_brief: row.28.clone().unwrap(), + proposal_importance: row.29.clone().unwrap(), + proposal_goal: row.30.clone().unwrap(), + proposal_metrics: row.31.clone().unwrap(), + }) + } + }; + FullProposalInfo { + proposal: Proposal::build(row), + challenge_info, + challenge_type, + } + } +} + +// This warning is disabled here. Values is only referenced as a type here. It should be ok not to +// split the types definitions. +#[allow(clippy::type_complexity)] +impl Insertable for Proposal { + type Values = ( + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq>, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + ); + + fn values(self) -> Self::Values { + ( + proposals::proposal_id.eq(self.proposal_id), + proposals::proposal_category.eq(self.proposal_category.category_name), + proposals::proposal_title.eq(self.proposal_title), + proposals::proposal_summary.eq(self.proposal_summary), + proposals::proposal_public_key.eq(self.proposal_public_key), + proposals::proposal_funds.eq(self.proposal_funds), + proposals::proposal_url.eq(self.proposal_url), + proposals::proposal_files_url.eq(self.proposal_files_url), + proposals::proposal_impact_score.eq(self.proposal_impact_score), + proposals::proposer_name.eq(self.proposer.proposer_name), + proposals::proposer_contact.eq(self.proposer.proposer_email), + proposals::proposer_url.eq(self.proposer.proposer_url), + proposals::proposer_relevant_experience.eq(self.proposer.proposer_relevant_experience), + proposals::chain_proposal_id.eq(self.chain_proposal_id), + proposals::chain_proposal_index.eq(self.chain_proposal_index), + proposals::chain_vote_options.eq(self.chain_vote_options.as_csv_string()), + proposals::chain_voteplan_id.eq(self.chain_voteplan_id), + proposals::challenge_id.eq(self.challenge_id), + ) + } +} + +struct SerdeToProposalChallengeInfoError; + +impl TryFrom for ProposalChallengeInfo { + type Error = SerdeToProposalChallengeInfoError; + + fn try_from(data: SerdeProposalChallengeInfo) -> Result { + let SerdeProposalChallengeInfo { simple, community } = data; + match (simple, community) { + (None, None) | (Some(_), Some(_)) => Err(SerdeToProposalChallengeInfoError), + (Some(simple), None) => Ok(ProposalChallengeInfo::Simple(simple)), + (None, Some(community_challenge)) => { + Ok(ProposalChallengeInfo::CommunityChoice(community_challenge)) + } + } + } +} + +impl From for SerdeProposalChallengeInfo { + fn from(data: ProposalChallengeInfo) -> Self { + match data { + ProposalChallengeInfo::Simple(simple) => SerdeProposalChallengeInfo { + simple: Some(simple), + community: None, + }, + ProposalChallengeInfo::CommunityChoice(community) => SerdeProposalChallengeInfo { + simple: None, + community: Some(community), + }, + } + } +} + +#[cfg(test)] +pub mod test { + use super::*; + use 
crate::db::{ + models::vote_options::VoteOptions, + schema::{ + proposal_community_choice_challenge, proposal_simple_challenge, proposals, voteplans, + }, + DbConnectionPool, + }; + use diesel::{ExpressionMethods, RunQueryDsl}; + use time::OffsetDateTime; + + pub fn get_test_proposal() -> FullProposalInfo { + const CHALLENGE_ID: i32 = 9001; + + FullProposalInfo { + proposal: Proposal { + internal_id: 1, + proposal_id: "1".to_string(), + proposal_category: Category { + category_id: "".to_string(), + category_name: "foo_category_name".to_string(), + category_description: "".to_string(), + }, + proposal_title: "the proposal".to_string(), + proposal_summary: "the proposal summary".to_string(), + proposal_public_key: "pubkey".to_string(), + proposal_funds: 10000, + proposal_url: "http://foo.bar".to_string(), + proposal_files_url: "http://foo.bar/files".to_string(), + proposal_impact_score: 100, + reviews_count: 0, + proposer: Proposer { + proposer_name: "tester".to_string(), + proposer_email: "tester@tester.tester".to_string(), + proposer_url: "http://tester.tester".to_string(), + proposer_relevant_experience: "ilumination".to_string(), + }, + chain_proposal_id: b"foobar".to_vec(), + chain_proposal_index: 0, + chain_vote_options: VoteOptions::parse_coma_separated_value("b,a,r"), + chain_voteplan_id: "voteplain_id".to_string(), + chain_vote_start_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_vote_end_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_committee_end_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_voteplan_payload: "none".to_string(), + chain_vote_encryption_key: "none".to_string(), + fund_id: 1, + challenge_id: CHALLENGE_ID, + }, + challenge_info: ProposalChallengeInfo::CommunityChoice( + community_choice::ChallengeInfo { + proposal_brief: "A for ADA".to_string(), + proposal_importance: "We need to get them while they're young.".to_string(), + proposal_goal: "Nebulous".to_string(), + proposal_metrics: + "\\- Number of people engaged into the creation of Cryptoalphabet" + .to_string(), + }, + ), + challenge_type: ChallengeType::CommunityChoice, + } + } + + pub fn populate_db_with_proposal(full_proposal: &FullProposalInfo, pool: &DbConnectionPool) { + let connection = pool.get().unwrap(); + let proposal = &full_proposal.proposal; + // insert the proposal information + let values = ( + proposals::proposal_id.eq(proposal.proposal_id.clone()), + proposals::proposal_category.eq(proposal.proposal_category.category_name.clone()), + proposals::proposal_title.eq(proposal.proposal_title.clone()), + proposals::proposal_summary.eq(proposal.proposal_summary.clone()), + proposals::proposal_public_key.eq(proposal.proposal_public_key.clone()), + proposals::proposal_funds.eq(proposal.proposal_funds), + proposals::proposal_url.eq(proposal.proposal_url.clone()), + proposals::proposal_files_url.eq(proposal.proposal_files_url.clone()), + proposals::proposal_impact_score.eq(proposal.proposal_impact_score), + proposals::proposer_name.eq(proposal.proposer.proposer_name.clone()), + proposals::proposer_contact.eq(proposal.proposer.proposer_email.clone()), + proposals::proposer_url.eq(proposal.proposer.proposer_url.clone()), + proposals::proposer_relevant_experience + .eq(proposal.proposer.proposer_relevant_experience.clone()), + proposals::chain_proposal_id.eq(proposal.chain_proposal_id.clone()), + proposals::chain_proposal_index.eq(proposal.chain_proposal_index), + proposals::chain_vote_options.eq(proposal.chain_vote_options.as_csv_string()), + 
proposals::chain_voteplan_id.eq(proposal.chain_voteplan_id.clone()), + proposals::challenge_id.eq(proposal.challenge_id), + ); + + diesel::insert_into(proposals::table) + .values(values) + .execute(&connection) + .unwrap(); + + // insert the related fund voteplan information + let voteplan_values = ( + voteplans::chain_voteplan_id.eq(proposal.chain_voteplan_id.clone()), + voteplans::chain_vote_start_time.eq(proposal.chain_vote_start_time), + voteplans::chain_vote_end_time.eq(proposal.chain_vote_end_time), + voteplans::chain_committee_end_time.eq(proposal.chain_committee_end_time), + voteplans::chain_voteplan_payload.eq(proposal.chain_voteplan_payload.clone()), + voteplans::chain_vote_encryption_key.eq(proposal.chain_vote_encryption_key.clone()), + voteplans::fund_id.eq(proposal.fund_id), + ); + + diesel::insert_into(voteplans::table) + .values(voteplan_values) + .execute(&connection) + .unwrap(); + + match &full_proposal.challenge_info { + ProposalChallengeInfo::Simple(data) => { + let simple_values = ( + proposal_simple_challenge::proposal_id.eq(proposal.proposal_id.clone()), + proposal_simple_challenge::proposal_solution.eq(data.proposal_solution.clone()), + ); + diesel::insert_into(proposal_simple_challenge::table) + .values(simple_values) + .execute(&connection) + .unwrap(); + } + ProposalChallengeInfo::CommunityChoice(data) => { + let community_values = ( + proposal_community_choice_challenge::proposal_id + .eq(proposal.proposal_id.clone()), + proposal_community_choice_challenge::proposal_brief + .eq(data.proposal_brief.clone()), + proposal_community_choice_challenge::proposal_importance + .eq(data.proposal_importance.clone()), + proposal_community_choice_challenge::proposal_goal + .eq(data.proposal_goal.clone()), + proposal_community_choice_challenge::proposal_metrics + .eq(data.proposal_metrics.clone()), + ); + diesel::insert_into(proposal_community_choice_challenge::table) + .values(community_values) + .execute(&connection) + .unwrap(); + } + }; + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/community_choice.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/community_choice.rs new file mode 100644 index 0000000000..df2a3bdf49 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/community_choice.rs @@ -0,0 +1,36 @@ +use crate::db::schema::proposal_community_choice_challenge; +use diesel::ExpressionMethods; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct ChallengeInfo { + #[serde(alias = "proposalBrief")] + pub proposal_brief: String, + #[serde(alias = "proposalImportance")] + pub proposal_importance: String, + #[serde(alias = "proposalGoal")] + pub proposal_goal: String, + #[serde(alias = "proposalMetrics")] + pub proposal_metrics: String, +} + +pub type ChallengeSqlValues = ( + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, +); + +impl ChallengeInfo { + pub fn to_sql_values_with_proposal_id(&self, proposal_id: &str) -> ChallengeSqlValues { + ( + proposal_community_choice_challenge::proposal_id.eq(proposal_id.to_string()), + proposal_community_choice_challenge::proposal_brief.eq(self.proposal_brief.clone()), + proposal_community_choice_challenge::proposal_importance + .eq(self.proposal_importance.clone()), + proposal_community_choice_challenge::proposal_goal.eq(self.proposal_goal.clone()), + 
proposal_community_choice_challenge::proposal_metrics.eq(self.proposal_metrics.clone()), + ) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/simple.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/simple.rs new file mode 100644 index 0000000000..ed24d5f61b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/proposals/simple.rs @@ -0,0 +1,23 @@ +use crate::db::schema::proposal_simple_challenge; +use diesel::ExpressionMethods; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, PartialEq, Eq, Debug, Clone)] +pub struct ChallengeInfo { + #[serde(alias = "proposalSolution")] + pub proposal_solution: String, +} + +pub type ChallengeSqlValues = ( + diesel::dsl::Eq, + diesel::dsl::Eq, +); + +impl ChallengeInfo { + pub fn to_sql_values_with_proposal_id(&self, proposal_id: &str) -> ChallengeSqlValues { + ( + proposal_simple_challenge::proposal_id.eq(proposal_id.to_string()), + proposal_simple_challenge::proposal_solution.eq(self.proposal_solution.clone()), + ) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/vote_options.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/vote_options.rs new file mode 100644 index 0000000000..aed58f7bd3 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/vote_options.rs @@ -0,0 +1,22 @@ +use itertools::Itertools; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; + +pub type VoteOptionsMap = HashMap; + +#[derive(Serialize, Deserialize, Clone, PartialEq, Eq, Debug)] +pub struct VoteOptions(pub VoteOptionsMap); + +impl VoteOptions { + pub fn parse_coma_separated_value(csv: &str) -> VoteOptions { + VoteOptions(csv.split(',').map(str::to_string).zip(0..).collect()) + } + + pub fn as_csv_string(&self) -> String { + self.0 + .iter() + .sorted_by_key(|(_, &i)| i) + .map(|(v, _)| v) + .join(",") + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/voteplans.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/voteplans.rs new file mode 100644 index 0000000000..f24ac0908d --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/models/voteplans.rs @@ -0,0 +1,92 @@ +use crate::db::schema::voteplans; +use diesel::{ExpressionMethods, Insertable, Queryable}; +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Queryable)] +pub struct Voteplan { + pub id: i32, + #[serde(alias = "chainVoteplanId")] + pub chain_voteplan_id: String, + #[serde(alias = "chainVoteStartTime")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_vote_start_time: i64, + #[serde(alias = "chainVoteEndTime")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_vote_end_time: i64, + #[serde(alias = "chainCommitteeEndTime")] + #[serde(serialize_with = "crate::utils::serde::serialize_unix_timestamp_as_rfc3339")] + #[serde(deserialize_with = "crate::utils::serde::deserialize_unix_timestamp_from_rfc3339")] + pub chain_committee_end_time: i64, + #[serde(alias = "chainVoteplanPayload")] + pub 
chain_voteplan_payload: String, + #[serde(alias = "chainVoteEncryptionKey")] + pub chain_vote_encryption_key: String, + #[serde(alias = "fundId")] + pub fund_id: i32, +} + +// This warning is disabled here. Values is only referenced as a type here. It should be ok not to +// split the types definitions. +#[allow(clippy::type_complexity)] +impl Insertable for Voteplan { + type Values = ( + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + diesel::dsl::Eq, + ); + + fn values(self) -> Self::Values { + ( + voteplans::chain_voteplan_id.eq(self.chain_voteplan_id), + voteplans::chain_vote_start_time.eq(self.chain_vote_start_time), + voteplans::chain_vote_end_time.eq(self.chain_vote_end_time), + voteplans::chain_committee_end_time.eq(self.chain_committee_end_time), + voteplans::chain_voteplan_payload.eq(self.chain_voteplan_payload), + voteplans::chain_vote_encryption_key.eq(self.chain_vote_encryption_key), + voteplans::fund_id.eq(self.fund_id), + ) + } +} + +#[cfg(test)] +pub mod test { + use crate::db::{models::voteplans::Voteplan, schema::voteplans, DbConnectionPool}; + use diesel::{ExpressionMethods, RunQueryDsl}; + use time::OffsetDateTime; + + pub fn get_test_voteplan_with_fund_id(fund_id: i32) -> Voteplan { + Voteplan { + id: 1, + chain_voteplan_id: format!("test_vote_plan{fund_id}"), + chain_vote_start_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_vote_end_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_committee_end_time: OffsetDateTime::now_utc().unix_timestamp(), + chain_voteplan_payload: "foopayload".to_string(), + chain_vote_encryption_key: "enckey".to_string(), + fund_id, + } + } + + pub fn populate_db_with_voteplan(voteplan: &Voteplan, pool: &DbConnectionPool) { + let connection = pool.get().unwrap(); + let values = ( + voteplans::chain_voteplan_id.eq(voteplan.chain_voteplan_id.clone()), + voteplans::chain_vote_start_time.eq(voteplan.chain_vote_start_time), + voteplans::chain_vote_end_time.eq(voteplan.chain_vote_end_time), + voteplans::chain_committee_end_time.eq(voteplan.chain_committee_end_time), + voteplans::chain_voteplan_payload.eq(voteplan.chain_voteplan_payload.clone()), + voteplans::chain_vote_encryption_key.eq(voteplan.chain_vote_encryption_key.clone()), + voteplans::fund_id.eq(voteplan.fund_id), + ); + diesel::insert_into(voteplans::table) + .values(values) + .execute(&connection) + .unwrap(); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/api_tokens.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/api_tokens.rs new file mode 100644 index 0000000000..1b4fa1d5bb --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/api_tokens.rs @@ -0,0 +1,96 @@ +use crate::db::models::api_tokens::ApiTokenData; +use crate::db::{ + models::api_tokens as api_token_model, + schema::{api_tokens, api_tokens::dsl::api_tokens as api_tokens_dsl}, + DbConnection, DbConnectionPool, +}; +use crate::v0::api_token::ApiToken; +use crate::v0::errors::HandleError; +use diesel::query_dsl::RunQueryDsl; +use diesel::{ExpressionMethods, Insertable, OptionalExtension, QueryDsl, QueryResult}; +use time::{Duration, OffsetDateTime}; + +pub async fn query_token( + token: ApiToken, + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + query_token_data_by_token(token.as_ref(), &db_conn) + .map_err(|e| 
HandleError::InternalError(e.to_string())) + }) + .await + .map_err(|_| HandleError::InternalError("Error executing request".to_string()))? +} + +/// Insert a token asynchronously. This method is a wrapper over `insert_data_token` that uses the same +/// approach synchronously for a complete formed APITokenData object related to the database model. +pub async fn insert_token(token: &ApiToken, pool: &DbConnectionPool) -> Result<(), HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + + let api_token_data = ApiTokenData { + token: token.clone(), + creation_time: OffsetDateTime::now_utc().unix_timestamp(), + expire_time: (OffsetDateTime::now_utc() + Duration::days(365)).unix_timestamp(), + }; + + tokio::task::spawn_blocking(move || { + insert_token_data(api_token_data, &db_conn) + .map(|_| ()) + .map_err(|e| HandleError::InternalError(e.to_string())) + }) + .await + .map_err(|_| HandleError::InternalError("Error executing request".to_string()))? +} + +pub fn query_token_data_by_token( + raw_token: &[u8], + db_conn: &DbConnection, +) -> Result, diesel::result::Error> { + api_tokens_dsl + .filter(api_tokens::token.eq(raw_token)) + .first::(db_conn) + .optional() +} + +pub fn insert_token_data(token_data: ApiTokenData, db_conn: &DbConnection) -> QueryResult { + diesel::insert_into(api_tokens::table) + .values(token_data.values()) + .execute(db_conn) +} + +pub fn batch_insert_token_data( + tokens_data: &[ApiTokenData], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(api_tokens::table) + .values( + tokens_data + .iter() + .map(|t| t.clone().values()) + .collect::>(), + ) + .execute(db_conn) +} + +#[cfg(test)] +mod test { + use super::*; + use crate::db::{ + load_db_connection_pool, migrations as db_testing, models::api_tokens::ApiTokenData, + DbConnectionPool, + }; + + #[tokio::test] + async fn api_token_insert_and_retrieve() { + // initialize db + let pool: DbConnectionPool = load_db_connection_pool("").unwrap(); + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + + // checks + let token = ApiToken::new(b"foo_bar_zen".to_vec()); + insert_token(&token, &pool).await.unwrap(); + let token_data: ApiTokenData = query_token(token.clone(), &pool).await.unwrap().unwrap(); + assert_eq!(token_data.token, token); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/challenges.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/challenges.rs new file mode 100644 index 0000000000..3c776a2f07 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/challenges.rs @@ -0,0 +1,82 @@ +use crate::{ + db::{ + models::{challenges::Challenge, proposals::Proposal}, + schema::challenges::{self, dsl as challenges_dsl}, + views_schema::full_proposals_info::dsl as proposals_dsl, + DbConnection, DbConnectionPool, + }, + v0::errors::HandleError, +}; +use diesel::{ExpressionMethods, Insertable, QueryDsl, QueryResult, RunQueryDsl}; + +pub async fn query_all_challenges(pool: &DbConnectionPool) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + diesel::QueryDsl::order_by( + challenges_dsl::challenges, + challenges::dsl::internal_id.asc(), + ) + .load::(&db_conn) + .map_err(|_| HandleError::InternalError("Error retrieving challenges".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? 
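// All of the async query helpers in these modules follow the same shape: take an r2d2
// connection from the pool, run the blocking diesel query on tokio's blocking thread pool,
// and fold both the query error and the join error into `HandleError`. A minimal sketch of
// that shared pattern (the `run_blocking` name and signature are illustrative only, not part
// of this crate):
//
//     async fn run_blocking<T, F>(pool: &DbConnectionPool, query: F) -> Result<T, HandleError>
//     where
//         F: FnOnce(&DbConnection) -> Result<T, HandleError> + Send + 'static,
//         T: Send + 'static,
//     {
//         let db_conn = pool.get().map_err(HandleError::DatabaseError)?;
//         tokio::task::spawn_blocking(move || query(&db_conn))
//             .await
//             .map_err(|_| HandleError::InternalError("Error executing request".to_string()))?
//     }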
+} + +pub async fn query_challenge_by_id( + id: i32, + pool: &DbConnectionPool, +) -> Result { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + diesel::QueryDsl::filter(challenges_dsl::challenges, challenges_dsl::id.eq(id)) + .first::(&db_conn) + .map_err(|_e| HandleError::NotFound("Error loading challenge".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub async fn query_challenges_by_fund_id( + fund_id: i32, + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + diesel::QueryDsl::filter( + challenges_dsl::challenges, + challenges_dsl::fund_id.eq(fund_id), + ) + .order_by(challenges::dsl::internal_id.asc()) + .load::(&db_conn) + .map_err(|_e| HandleError::NotFound("Error loading challenges for fund id".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub async fn query_challenge_proposals_by_id( + id: i32, + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + diesel::QueryDsl::filter( + proposals_dsl::full_proposals_info, + proposals_dsl::challenge_id.eq(id), + ) + .load::(&db_conn) + .map_err(|_e| HandleError::NotFound("Error loading challenge".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub fn batch_insert_challenges( + challenges: &[>::Values], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(challenges::table) + .values(challenges) + .execute(db_conn) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/community_advisors_reviews.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/community_advisors_reviews.rs new file mode 100644 index 0000000000..4c18465eef --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/community_advisors_reviews.rs @@ -0,0 +1,40 @@ +use crate::db::{ + models::community_advisors_reviews::AdvisorReview, + schema::community_advisors_reviews::{self, dsl as reviews_dsl}, + DbConnection, DbConnectionPool, +}; +use crate::v0::errors::HandleError; + +use diesel::{ExpressionMethods, Insertable, QueryDsl, QueryResult, RunQueryDsl}; + +pub async fn query_reviews_by_fund_id( + id: i32, + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + reviews_dsl::community_advisors_reviews + .filter(reviews_dsl::proposal_id.eq(id)) + .load::(&db_conn) + .map_err(|_e| { + HandleError::NotFound("Error loading community advisors reviews".to_string()) + }) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? 
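// `batch_insert_challenges` above expects the insert tuples to be pre-built by the caller,
// so a hypothetical caller collects `Insertable::values()` first (assuming a `Vec<Challenge>`
// named `fund_challenges`, an open `DbConnection`, and `Insertable` in scope):
//
//     let values: Vec<_> = fund_challenges
//         .into_iter()
//         .map(|challenge| challenge.values())
//         .collect();
//     batch_insert_challenges(&values, &db_conn)?;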
+} + +pub fn batch_insert_advisor_reviews( + reviews: &[AdvisorReview], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(community_advisors_reviews::table) + .values( + reviews + .iter() + .cloned() + .map(|r| r.values()) + .collect::>(), + ) + .execute(db_conn) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/funds.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/funds.rs new file mode 100644 index 0000000000..6ca0e238ba --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/funds.rs @@ -0,0 +1,142 @@ +use crate::db::{ + models::{ + challenges::Challenge, + funds::{Fund, FundStageDates}, + goals::Goal, + voteplans::Voteplan, + }, + schema::{ + challenges::dsl as challenges_dsl, funds, funds::dsl as fund_dsl, goals::dsl as goals_dsl, + voteplans::dsl as voteplans_dsl, + }, + DbConnection, DbConnectionPool, +}; +use crate::v0::errors::HandleError; +use diesel::{ + r2d2::{ConnectionManager, PooledConnection}, + ExpressionMethods, Insertable, QueryDsl, QueryResult, RunQueryDsl, SqliteConnection, +}; +use serde::{Deserialize, Serialize}; + +fn join_fund( + mut fund: Fund, + db_conn: &PooledConnection>, +) -> Result { + let id = fund.id; + + fund.chain_vote_plans = voteplans_dsl::voteplans + .filter(voteplans_dsl::fund_id.eq(id)) + .load::(db_conn) + .map_err(|_e| HandleError::NotFound("Error loading voteplans".to_string()))?; + + fund.challenges = challenges_dsl::challenges + .filter(challenges_dsl::fund_id.eq(id)) + .load::(db_conn) + .map_err(|_e| HandleError::NotFound("Error loading challenges".to_string()))?; + + fund.goals = goals_dsl::goals + .filter(goals_dsl::fund_id.eq(id)) + .load::(db_conn) + .map_err(|_e| HandleError::NotFound("Error loading goals".to_string()))?; + + Ok(fund) +} + +pub async fn query_fund_by_id(id: i32, pool: &DbConnectionPool) -> Result { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + let fund = fund_dsl::funds + .filter(fund_dsl::id.eq(id)) + .first::(&db_conn) + .map_err(|_e| HandleError::NotFound("fund".to_string()))?; + + join_fund(fund, &db_conn) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct FundWithNext { + #[serde(flatten)] + pub fund: Fund, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub next: Option, +} + +#[derive(Serialize, Deserialize, Debug)] +pub struct FundNextInfo { + pub id: i32, + pub fund_name: String, + #[serde(flatten)] + pub stage_dates: FundStageDates, +} + +pub async fn query_current_fund(pool: &DbConnectionPool) -> Result { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + let funds: Vec = fund_dsl::funds + // TODO: Not sure if sorting by the PK is actually necessary + // + // this assumes that the next will be the second inserted + // and that the current is the first. 
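// Concretely: if the table holds funds with ids 9 and 10 (inserted in that order),
// `order(id).limit(2)` loads `[fund_9, fund_10]`; fund_9 becomes `fund` and fund_10
// becomes `next` in the `FundWithNext` returned below.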
+ .order(fund_dsl::id) + .limit(2) + .load(&db_conn) + .map_err(|_e| HandleError::NotFound("fund".to_string()))?; + + let mut funds = funds.into_iter(); + let current = funds + .next() + .ok_or_else(|| HandleError::NotFound("current found not found".to_string()))?; + + let next = funds.next(); + + let current = join_fund(current, &db_conn)?; + + Ok(FundWithNext { + fund: current, + next: next.map(|f| FundNextInfo { + id: f.id, + fund_name: f.fund_name, + stage_dates: f.stage_dates, + }), + }) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub async fn query_all_funds(pool: &DbConnectionPool) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + fund_dsl::funds + .select(fund_dsl::id) + .load::(&db_conn) + .map_err(|_| HandleError::InternalError("Error retrieving funds".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub fn insert_fund(fund: Fund, db_conn: &DbConnection) -> QueryResult { + diesel::insert_into(funds::table) + .values(fund.values()) + .execute(db_conn)?; + // This can be done in a single query if we move to postgres or any DB that supports `get_result` + // instead of `execute` in the previous insert + funds::table.order(fund_dsl::id.desc()).first(db_conn) +} + +pub async fn put_fund(fund: Fund, pool: &DbConnectionPool) -> Result<(), HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + diesel::replace_into(funds::table) + .values(fund.values()) + .execute(&db_conn) + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))?; + + // TODO: + // replace the voteplan and challenges too? 
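// One possible shape for the TODO above (untested sketch; it assumes
// `crate::db::schema::voteplans` is imported and that the voteplans were cloned out of the
// fund before `fund.values()` consumed it):
//
//     let voteplans_of_fund = fund.chain_vote_plans.clone(); // taken before `fund.values()`
//     for voteplan in voteplans_of_fund {
//         diesel::replace_into(voteplans::table)
//             .values(voteplan.values())
//             .execute(&db_conn)
//             .map_err(|_| HandleError::InternalError("Error executing request".to_string()))?;
//     }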
+ + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/goals.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/goals.rs new file mode 100644 index 0000000000..504736f8b7 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/goals.rs @@ -0,0 +1,8 @@ +use crate::db::{models::goals::InsertGoal, schema::goals, DbConnection}; +use diesel::{QueryResult, RunQueryDsl}; + +pub fn batch_insert(goals: Vec, db_conn: &DbConnection) -> QueryResult { + diesel::insert_into(goals::table) + .values(goals) + .execute(db_conn) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/mod.rs new file mode 100644 index 0000000000..f20eb9c1b7 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/mod.rs @@ -0,0 +1,7 @@ +pub mod api_tokens; +pub mod challenges; +pub mod community_advisors_reviews; +pub mod funds; +pub mod goals; +pub mod proposals; +pub mod voteplans; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/proposals.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/proposals.rs new file mode 100644 index 0000000000..4413d672f6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/proposals.rs @@ -0,0 +1,103 @@ +use crate::db::models::proposals::{community_choice, simple, FullProposalInfo, Proposal}; +use crate::db::schema::proposals; +use crate::db::{ + schema::{ + proposal_community_choice_challenge as community_choice_proposal_dsl, + proposal_simple_challenge as simple_proposal_dsl, + }, + views_schema::full_proposals_info::dsl as full_proposal_dsl, + views_schema::full_proposals_info::dsl::full_proposals_info, + DbConnection, DbConnectionPool, +}; +use crate::v0::errors::HandleError; +use diesel::query_dsl::filter_dsl::FilterDsl; +use diesel::{ExpressionMethods, Insertable, QueryResult, RunQueryDsl}; + +pub async fn query_all_proposals( + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + full_proposals_info + .load::(&db_conn) + .map_err(|_e| HandleError::NotFound("proposals".to_string())) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub async fn query_proposal_by_id( + id: i32, + pool: &DbConnectionPool, +) -> Result { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + full_proposals_info + .filter(full_proposal_dsl::id.eq(id)) + .first::(&db_conn) + .map_err(|_e| HandleError::NotFound(format!("proposal with id {}", id))) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? 
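// A hypothetical caller of this helper, e.g. from a warp handler (assumes a
// `DbConnectionPool` named `pool` and a `proposal_id` taken from the request path):
//
//     match query_proposal_by_id(proposal_id, &pool).await {
//         Ok(full_info) => { /* one row from the `full_proposals_info` view */ }
//         Err(HandleError::NotFound(msg)) => { /* 404: msg already names the missing id */ }
//         Err(other) => return Err(other),
//     }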
+} + +pub async fn query_proposals_by_voteplan_id_and_indexes( + voteplan_id: String, + indexes: Vec, + pool: DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + full_proposals_info + .filter(full_proposal_dsl::chain_voteplan_id.eq(voteplan_id.clone())) + .filter(full_proposal_dsl::chain_proposal_index.eq_any(&indexes)) + .load::(&db_conn) + .map_err(|_e| { + HandleError::NotFound(format!( + "proposal with voteplan id {} and indexes {:?}", + voteplan_id, indexes + )) + }) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? +} + +pub fn insert_proposal(proposal: Proposal, db_conn: &DbConnection) -> QueryResult { + diesel::insert_into(proposals::table) + .values(proposal.values()) + .execute(db_conn) +} + +pub fn batch_insert_proposals( + proposals_slice: &[Proposal], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(proposals::table) + .values( + proposals_slice + .iter() + .cloned() + .map(|proposal| proposal.values()) + .collect::>(), + ) + .execute(db_conn) +} + +pub fn batch_insert_community_choice_challenge_data( + values: &[community_choice::ChallengeSqlValues], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(community_choice_proposal_dsl::table) + .values(values) + .execute(db_conn) +} + +pub fn batch_insert_simple_challenge_data( + values: &[simple::ChallengeSqlValues], + db_conn: &DbConnection, +) -> QueryResult { + diesel::insert_into(simple_proposal_dsl::table) + .values(values) + .execute(db_conn) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/voteplans.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/voteplans.rs new file mode 100644 index 0000000000..2aa38a95e4 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/queries/voteplans.rs @@ -0,0 +1,58 @@ +use crate::db::{ + models::voteplans::Voteplan, schema::voteplans, schema::voteplans::dsl as voteplans_dsl, + DbConnection, DbConnectionPool, +}; +use crate::v0::errors::HandleError; +use diesel::{ExpressionMethods, Insertable, QueryDsl, QueryResult, RunQueryDsl}; + +pub async fn query_voteplan_by_id( + id: i32, + pool: &DbConnectionPool, +) -> Result, HandleError> { + let db_conn = pool.get().map_err(HandleError::DatabaseError)?; + tokio::task::spawn_blocking(move || { + diesel::QueryDsl::filter(voteplans_dsl::voteplans, voteplans_dsl::fund_id.eq(id)) + .load::(&db_conn) + .map_err(|_e| HandleError::NotFound(format!("voteplan with id {}", id))) + }) + .await + .map_err(|_e| HandleError::InternalError("Error executing request".to_string()))? 
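// Note: the filter above is on `fund_id`, so this returns every voteplan attached to the
// given fund (possibly more than one), e.g.:
//
//     let fund_voteplans = query_voteplan_by_id(fund.id, &pool).await?;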
+} + +pub fn insert_voteplan(voteplan: Voteplan, db_conn: &DbConnection) -> QueryResult { + diesel::insert_into(voteplans::table) + .values(voteplan.values()) + .execute(db_conn)?; + + // This can be done in a single query if we move to postgres or any DB that supports `get_result` + // instead of `execute` in the previous insert + voteplans::table.order(voteplans::id.desc()).first(db_conn) +} + +pub fn batch_insert_voteplans( + voteplans_slice: &[Voteplan], + db_conn: &DbConnection, +) -> QueryResult> { + let len = voteplans_slice.len(); + + diesel::insert_into(voteplans::table) + .values( + voteplans_slice + .iter() + .cloned() + .map(|voteplan| voteplan.values()) + .collect::>(), + ) + .execute(db_conn)?; + + // This can be done in a single query if we move to postgres or any DB that supports `get_result` + // instead of `execute` in the previous insert + Ok(voteplans::table + .order(voteplans::id.desc()) + .limit(len as i64) + .load(db_conn)? + .iter() + .cloned() + .rev() + .collect()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/schema.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/schema.rs new file mode 100644 index 0000000000..615b010578 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/schema.rs @@ -0,0 +1,139 @@ +table! { + api_tokens (token) { + token -> Binary, + creation_time -> BigInt, + expire_time -> BigInt, + } +} + +table! { + challenges (id) { + internal_id -> Integer, + id -> Integer, + challenge_type -> Text, + title -> Text, + description -> Text, + rewards_total -> BigInt, + proposers_rewards -> BigInt, + fund_id -> Integer, + challenge_url -> Text, + highlights -> Nullable, + } +} + +table! { + community_advisors_reviews (id) { + id -> Integer, + proposal_id -> Integer, + assessor -> Text, + impact_alignment_rating_given -> Integer, + impact_alignment_note -> Text, + feasibility_rating_given -> Integer, + feasibility_note -> Text, + auditability_rating_given -> Integer, + auditability_note -> Text, + ranking -> Integer, + } +} + +table! { + funds (id) { + id -> Integer, + fund_name -> Text, + fund_goal -> Text, + registration_snapshot_time -> BigInt, + next_registration_snapshot_time -> BigInt, + voting_power_threshold -> BigInt, + fund_start_time -> BigInt, + fund_end_time -> BigInt, + next_fund_start_time -> BigInt, + insight_sharing_start -> BigInt, + proposal_submission_start -> BigInt, + refine_proposals_start -> BigInt, + finalize_proposals_start -> BigInt, + proposal_assessment_start -> BigInt, + assessment_qa_start -> BigInt, + snapshot_start -> BigInt, + voting_start -> BigInt, + voting_end -> BigInt, + tallying_end -> BigInt, + results_url -> Text, + survey_url -> Text, + } +} + +table! { + goals (id) { + id -> Integer, + goal_name -> Text, + fund_id -> Integer, + } +} + +table! { + proposal_community_choice_challenge (proposal_id) { + proposal_id -> Text, + proposal_brief -> Nullable, + proposal_importance -> Nullable, + proposal_goal -> Nullable, + proposal_metrics -> Nullable, + } +} + +table! { + proposal_simple_challenge (proposal_id) { + proposal_id -> Text, + proposal_solution -> Nullable, + } +} + +table! 
{ + proposals (id) { + id -> Integer, + proposal_id -> Text, + proposal_category -> Text, + proposal_title -> Text, + proposal_summary -> Text, + proposal_public_key -> Text, + proposal_funds -> BigInt, + proposal_url -> Text, + proposal_files_url -> Text, + proposal_impact_score -> BigInt, + proposer_name -> Text, + proposer_contact -> Text, + proposer_url -> Text, + proposer_relevant_experience -> Text, + chain_proposal_id -> Binary, + chain_proposal_index -> BigInt, + chain_vote_options -> Text, + chain_voteplan_id -> Text, + challenge_id -> Integer, + } +} + +table! { + voteplans (id) { + id -> Integer, + chain_voteplan_id -> Text, + chain_vote_start_time -> BigInt, + chain_vote_end_time -> BigInt, + chain_committee_end_time -> BigInt, + chain_voteplan_payload -> Text, + chain_vote_encryption_key -> Text, + fund_id -> Integer, + } +} + +joinable!(goals -> funds (fund_id)); + +allow_tables_to_appear_in_same_query!( + api_tokens, + challenges, + community_advisors_reviews, + funds, + goals, + proposal_community_choice_challenge, + proposal_simple_challenge, + proposals, + voteplans, +); diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/views_schema.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/views_schema.rs new file mode 100644 index 0000000000..528afb64f6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/db/views_schema.rs @@ -0,0 +1,38 @@ +use diesel::table; + +table! { + full_proposals_info { + id -> Integer, + proposal_id -> Text, + proposal_category -> Text, + proposal_title -> Text, + proposal_summary -> Text, + proposal_public_key -> Text, + proposal_funds -> BigInt, + proposal_url -> Text, + proposal_files_url -> Text, + proposal_impact_score -> BigInt, + proposer_name -> Text, + proposer_contact -> Text, + proposer_url -> Text, + proposer_relevant_experience -> Text, + chain_proposal_id -> Binary, + chain_proposal_index -> BigInt, + chain_vote_options -> Text, + chain_voteplan_id -> Text, + chain_vote_start_time -> BigInt, + chain_vote_end_time -> BigInt, + chain_committee_end_time -> BigInt, + chain_voteplan_payload -> Text, + chain_vote_encryption_key -> Text, + fund_id -> Integer, + challenge_id -> Integer, + reviews_count -> Integer, + challenge_type -> Text, + proposal_solution -> Nullable, + proposal_brief -> Nullable, + proposal_importance -> Nullable, + proposal_goal -> Nullable, + proposal_metrics -> Nullable, + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/lib.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/lib.rs new file mode 100644 index 0000000000..587ecf60b0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/lib.rs @@ -0,0 +1,12 @@ +#[macro_use] +extern crate diesel; +#[macro_use] +extern crate structopt; + +#[macro_use] +extern crate diesel_migrations; + +pub mod db; +pub mod server; +pub mod utils; +pub mod v0; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/bootstrapping.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/bootstrapping.rs new file mode 100644 index 0000000000..195855dc6d --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/bootstrapping.rs @@ -0,0 +1,77 @@ +use super::settings::{Cors, ServiceSettings, Tls}; + +use std::time::Duration; +use warp::filters::cors::Builder as CorsBuilder; +use warp::{Filter, TlsServer}; + +fn setup_cors(cors_config: Cors) -> CorsBuilder { + 
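// The builder returned here is attached to the whole route tree in
// `start_server_with_config` below via `app.with(setup_cors(settings.cors))`.
// Roughly equivalent warp usage in isolation (origin and max-age values are illustrative):
//
//     let cors = warp::cors()
//         .allow_origins(vec!["https://example.com"])
//         .max_age(std::time::Duration::from_secs(60));
//     let routes = warp::any().map(|| "ok").with(cors);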
let mut cors: CorsBuilder = if let Some(allowed_origins) = cors_config.allowed_origins { + let allowed_origins: Vec<&str> = allowed_origins.iter().map(AsRef::as_ref).collect(); + warp::cors().allow_origins(allowed_origins) + } else { + warp::cors().allow_any_origin() + }; + + if let Some(max_age) = cors_config.max_age_secs { + cors = cors.max_age(Duration::from_secs(max_age)); + } + cors +} + +fn setup_tls(app: App, tls_config: Tls) -> TlsServer +where + App: Filter + Clone + Send + Sync + 'static, + App::Extract: warp::Reply, +{ + assert!( + tls_config.is_loaded(), + "Tls config should be filled before calling setup" + ); + let (cert_file, priv_key_file) = ( + tls_config.cert_file.unwrap(), + tls_config.priv_key_file.unwrap(), + ); + warp::serve(app) + .tls() + .cert_path(cert_file) + .key_path(priv_key_file) +} + +async fn start_server_with_config(app: App, settings: ServiceSettings) +where + App: Filter + Clone + Send + Sync + 'static, + App::Extract: warp::Reply, +{ + let app = app.with(setup_cors(settings.cors)); + + if settings.tls.is_loaded() { + let (_, server) = setup_tls(app, settings.tls).bind_with_graceful_shutdown( + settings.address, + super::signals::watch_signal_for_shutdown(), + ); + server.await + } else { + let (_, server) = warp::serve(app).bind_with_graceful_shutdown( + settings.address, + super::signals::watch_signal_for_shutdown(), + ); + server.await + }; +} + +pub async fn start_server(app: App, settings: Option) +where + App: Filter + Clone + Send + Sync + 'static, + App::Extract: warp::Reply, +{ + if let Some(settings) = settings { + start_server_with_config(app, settings).await + } else { + // easy way of starting a local debug server + let (_, server) = warp::serve(app).bind_with_graceful_shutdown( + ([127, 0, 0, 1], 3030), + super::signals::watch_signal_for_shutdown(), + ); + server.await + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/exit_codes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/exit_codes.rs new file mode 100644 index 0000000000..131ff57478 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/exit_codes.rs @@ -0,0 +1,30 @@ +#[derive(PartialEq, Eq, Debug)] +pub enum ApplicationExitCode { + WriteSettingsError = 10, + LoadSettingsError, + DbConnectionError, + ServiceVersionError, + SnapshotWatcherError, +} + +impl ApplicationExitCode { + // TODO: this method can be generalize once std::num new features is stabilized. 
+ // https://doc.rust-lang.org/0.12.0/std/num/trait.Num.html + // https://doc.rust-lang.org/0.12.0/std/num/trait.FromPrimitive.html + pub fn from_i32(n: i32) -> Option { + match n { + 10 => Some(Self::WriteSettingsError), + 11 => Some(Self::LoadSettingsError), + 12 => Some(Self::DbConnectionError), + 13 => Some(Self::ServiceVersionError), + 14 => Some(Self::SnapshotWatcherError), + _ => None, + } + } +} + +impl From for i32 { + fn from(exit_code: ApplicationExitCode) -> Self { + exit_code as i32 + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/mod.rs new file mode 100644 index 0000000000..899070ea85 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/mod.rs @@ -0,0 +1,6 @@ +pub mod bootstrapping; +pub mod exit_codes; +pub mod settings; +pub mod signals; + +pub use bootstrapping::start_server; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/config.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/config.rs new file mode 100644 index 0000000000..c83bc5dfb2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/config.rs @@ -0,0 +1,544 @@ +use serde::{de::Visitor, Deserialize, Deserializer, Serialize}; +use simplelog::LevelFilter; +use std::io::ErrorKind; +use std::net::SocketAddr; +use std::ops::Deref; +use std::path::PathBuf; +use std::str::FromStr; +use std::{fmt, fs}; + +/// Settings environment variables names +const DATABASE_URL: &str = "DATABASE_URL"; +const TLS_CERT_FILE: &str = "TLS_CERT_FILE"; +const TLS_PRIVATE_KEY_FILE: &str = "TLS_PK_FILE"; +const CORS_ALLOWED_ORIGINS: &str = "CORS_ALLOWED_ORIGINS"; +const VIT_SERVICE_VERSION_ENV_VARIABLE: &str = "SERVICE_VERSION"; + +pub(crate) const ADDRESS_DEFAULT: &str = "0.0.0.0:3030"; +pub(crate) const DB_URL_DEFAULT: &str = "./db/database.sqlite3"; +pub(crate) const BLOCK0_PATH_DEFAULT: &str = "./resources/v0/block0.bin"; + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, StructOpt)] +#[serde(deny_unknown_fields)] +#[structopt(rename_all = "kebab-case")] +pub struct ServiceSettings { + /// Load settings from file + #[serde(skip)] + #[structopt(long)] + pub in_settings_file: Option, + + /// Dump current settings to file + #[serde(skip)] + #[structopt(long)] + pub out_settings_file: Option, + + /// Server binding address + #[structopt(long, default_value = ADDRESS_DEFAULT)] + pub address: SocketAddr, + + #[serde(default)] + #[structopt(flatten)] + pub tls: Tls, + + #[serde(default)] + #[structopt(flatten)] + pub cors: Cors, + + /// Database url + #[structopt(long, env = DATABASE_URL, default_value = DB_URL_DEFAULT)] + pub db_url: String, + + /// block0 static file path + #[structopt(long, default_value = BLOCK0_PATH_DEFAULT)] + pub block0_path: String, + + /// Enable API Tokens feature + #[serde(default)] + #[structopt(long)] + pub enable_api_tokens: bool, + + #[serde(default)] + #[structopt(flatten)] + pub log: Log, + + #[structopt(long, env = VIT_SERVICE_VERSION_ENV_VARIABLE)] + pub service_version: String, +} + +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, StructOpt, Default)] +#[serde(deny_unknown_fields)] +#[structopt(rename_all = "kebab-case")] +pub struct Tls { + /// Path to server X.509 certificate chain file, must be PEM-encoded and contain at least 1 item + #[structopt(long, env = TLS_CERT_FILE)] + pub cert_file: Option, 
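// `Tls::is_loaded()` below returns true only when both paths are set (e.g. via the
// `TLS_CERT_FILE` / `TLS_PK_FILE` environment variables), and `start_server_with_config`
// in bootstrapping.rs uses that to choose between the TLS bind and plain HTTP.
// Illustrative values:
//
//     let tls = Tls {
//         cert_file: Some("/etc/vit/cert.pem".to_string()),
//         priv_key_file: Some("/etc/vit/key.pem".to_string()),
//     };
//     assert!(tls.is_loaded());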
+ + /// Path to server private key file, must be PKCS8 with single PEM-encoded, unencrypted key + #[structopt(long, env = TLS_PRIVATE_KEY_FILE)] + pub priv_key_file: Option, +} + +#[derive(Debug, Clone, Default, Serialize, PartialEq, Eq)] +pub struct CorsOrigin(String); + +#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)] +pub struct AllowedOrigins(Vec); + +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, StructOpt)] +#[serde(deny_unknown_fields)] +#[structopt(rename_all = "kebab-case")] +pub struct Cors { + /// If none provided, echos request origin + #[serde(default)] + #[structopt(long, env = CORS_ALLOWED_ORIGINS, parse(try_from_str = parse_allowed_origins))] + pub allowed_origins: Option, + /// If none provided, CORS responses won't be cached + #[structopt(long)] + pub max_age_secs: Option, +} + +#[derive(Debug, Copy, Clone, Serialize, Deserialize, PartialEq, Eq)] +#[serde(rename_all = "lowercase")] +#[derive(Default)] +pub enum LogLevel { + #[default] + Disabled, + Error, + Warn, + Info, + Debug, + Trace, +} + +#[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq, StructOpt)] +#[serde(deny_unknown_fields)] +#[structopt(rename_all = "kebab-case")] +pub struct Log { + /// Output log file path + #[structopt(long)] + pub log_output_path: Option, + + /// Application logging level + #[structopt(long)] + pub log_level: Option, +} + +fn parse_allowed_origins(arg: &str) -> Result { + let mut res: Vec = Vec::new(); + for origin_str in arg.split(';') { + res.push(CorsOrigin::from_str(origin_str)?); + } + Ok(AllowedOrigins(res)) +} + +impl ServiceSettings { + pub fn override_from(&self, other_settings: &ServiceSettings) -> Self { + let mut return_settings = self.clone(); + + if let Some(in_file) = &other_settings.in_settings_file { + return_settings.in_settings_file = Some(in_file.clone()); + } + + if let Some(out_file) = &other_settings.out_settings_file { + return_settings.out_settings_file = Some(out_file.clone()); + } + + if other_settings.address != SocketAddr::from_str(ADDRESS_DEFAULT).unwrap() { + return_settings.address = other_settings.address; + } + + if other_settings.tls.is_loaded() { + return_settings.tls = other_settings.tls.clone(); + } + + if other_settings.cors.allowed_origins.is_some() { + return_settings.cors.allowed_origins = other_settings.cors.allowed_origins.clone(); + } + + if other_settings.cors.max_age_secs.is_some() { + return_settings.cors.max_age_secs = other_settings.cors.max_age_secs + } + + if other_settings.db_url != DB_URL_DEFAULT { + return_settings.db_url = other_settings.db_url.clone(); + } + + if other_settings.block0_path != BLOCK0_PATH_DEFAULT { + return_settings.block0_path = other_settings.block0_path.clone(); + } + + if other_settings.log.log_level.is_some() { + return_settings.log.log_level = other_settings.log.log_level; + } + + if other_settings.log.log_output_path.is_some() { + return_settings.log.log_output_path = other_settings.log.log_output_path.clone(); + } + + if !other_settings.service_version.is_empty() { + return_settings.service_version = other_settings.service_version.clone(); + } + + return_settings.enable_api_tokens = other_settings.enable_api_tokens; + + return_settings + } +} + +impl Tls { + pub fn is_loaded(&self) -> bool { + self.priv_key_file.is_some() && self.cert_file.is_some() + } +} + +impl<'de> Deserialize<'de> for CorsOrigin { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct CorsOriginVisitor; + impl<'de> Visitor<'de> for 
CorsOriginVisitor { + type Value = CorsOrigin; + + fn expecting(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + write!(fmt, "an origin in format http[s]://example.com[:3000]",) + } + + fn visit_str(self, v: &str) -> std::result::Result + where + E: serde::de::Error, + { + CorsOrigin::from_str(v).map_err(E::custom) + } + } + deserializer.deserialize_str(CorsOriginVisitor) + } +} + +impl FromStr for CorsOrigin { + type Err = std::io::Error; + + fn from_str(origin: &str) -> Result { + let uri = warp::http::uri::Uri::from_str(origin).map_err(|invalid_uri| { + std::io::Error::new( + ErrorKind::InvalidInput, + format!("Invalid uri: {}.\n{}", origin, invalid_uri), + ) + })?; + if let Some(s) = uri.scheme_str() { + if s != "http" && s != "https" { + return Err(std::io::Error::new( + ErrorKind::InvalidInput, + format!( + "Cors origin invalid schema {}, only [http] and [https] are supported: ", + uri.scheme_str().unwrap() + ), + )); + } + } else { + return Err(std::io::Error::new( + ErrorKind::InvalidInput, + "Cors origin missing schema, only [http] or [https] are supported", + )); + } + + if let Some(p) = uri.path_and_query() { + if p.as_str() != "/" { + return Err(std::io::Error::new( + ErrorKind::InvalidInput, + format!("Invalid value {} in cors schema.", p.as_str()), + )); + } + } + Ok(CorsOrigin(origin.trim_end_matches('/').to_owned())) + } +} + +impl AsRef for CorsOrigin { + fn as_ref(&self) -> &str { + &self.0 + } +} + +impl Deref for AllowedOrigins { + type Target = Vec; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From for LevelFilter { + fn from(level: LogLevel) -> Self { + match level { + LogLevel::Disabled => LevelFilter::Off, + LogLevel::Error => LevelFilter::Error, + LogLevel::Warn => LevelFilter::Warn, + LogLevel::Info => LevelFilter::Info, + LogLevel::Debug => LevelFilter::Debug, + LogLevel::Trace => LevelFilter::Trace, + } + } +} + +impl From for tracing_subscriber::filter::LevelFilter { + fn from(level: LogLevel) -> Self { + match level { + LogLevel::Disabled => tracing_subscriber::filter::LevelFilter::OFF, + LogLevel::Error => tracing_subscriber::filter::LevelFilter::ERROR, + LogLevel::Warn => tracing_subscriber::filter::LevelFilter::WARN, + LogLevel::Info => tracing_subscriber::filter::LevelFilter::INFO, + LogLevel::Debug => tracing_subscriber::filter::LevelFilter::DEBUG, + LogLevel::Trace => tracing_subscriber::filter::LevelFilter::TRACE, + } + } +} + +impl FromStr for LogLevel { + type Err = std::io::Error; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "disabled" => Ok(Self::Disabled), + "error" => Ok(Self::Error), + "warn" => Ok(Self::Warn), + "info" => Ok(Self::Info), + "debug" => Ok(Self::Debug), + "trace" => Ok(Self::Trace), + _ => Err(std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("{} is not a valid log level", s), + )), + } + } +} + +impl fmt::Display for LogLevel { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + match *self { + Self::Disabled => write!(f, "disabled"), + Self::Error => write!(f, "error"), + Self::Warn => write!(f, "warn"), + Self::Info => write!(f, "info"), + Self::Debug => write!(f, "debug"), + Self::Trace => write!(f, "trace"), + } + } +} + +pub fn load_settings_from_file(file_path: &str) -> Result { + let f = fs::File::open(file_path)?; + serde_json::from_reader(&f) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::InvalidInput, e.to_string())) +} + +pub fn dump_settings_to_file( + file_path: &str, + settings: &ServiceSettings, +) -> Result<(), impl 
std::error::Error> { + let f = fs::File::create(file_path)?; + serde_json::to_writer_pretty(&f, settings) + .map_err(|e| std::io::Error::new(std::io::ErrorKind::Other, e.to_string())) +} + +#[cfg(test)] +mod test { + use super::*; + use std::net::SocketAddr; + use std::str::FromStr; + use structopt::StructOpt; + use tempfile; + + #[test] + fn cors_origin_from_str() { + let s = "https://foo.test"; + CorsOrigin::from_str(s).unwrap(); + } + + #[test] + fn parse_allowed_origins_from_str() { + let s = "https://foo.test;https://test.foo:5050"; + let res = parse_allowed_origins(s).unwrap(); + assert_eq!(res.len(), 2); + assert_eq!(res[0], CorsOrigin::from_str("https://foo.test").unwrap()); + assert_eq!( + res[1], + CorsOrigin::from_str("https://test.foo:5050").unwrap() + ); + } + + #[test] + fn load_json_configuration() { + let raw_config = r#" + { + "address" : "127.0.0.1:3030", + "tls" : { + "cert_file" : "./foo/bar.pem", + "priv_key_file" : "./bar/foo.pem" + }, + "cors" : { + "allowed_origins" : ["https://foo.test"], + "max_age_secs" : 60 + }, + "db_url": "", + "block0_path": "./test/bin.test", + "enable_api_tokens" : true, + "log" : { + "log_output_path" : "./server.log", + "log_level" : "error" + }, + "service_version" : "v0.2.0" + } + "#; + + let config: ServiceSettings = serde_json::from_str(raw_config).unwrap(); + assert_eq!( + config.address, + SocketAddr::from_str("127.0.0.1:3030").unwrap() + ); + assert_eq!(config.block0_path, "./test/bin.test"); + assert!(config.enable_api_tokens); + assert_eq!( + config.log.log_output_path.unwrap(), + std::path::PathBuf::from_str("./server.log").unwrap() + ); + assert_eq!(config.log.log_level, Some(LogLevel::Error)); + assert_eq!(config.service_version, "v0.2.0"); + let tls_config = config.tls; + let cors_config = config.cors; + assert_eq!(tls_config.cert_file.unwrap(), "./foo/bar.pem"); + assert_eq!(tls_config.priv_key_file.unwrap(), "./bar/foo.pem"); + assert_eq!( + cors_config.allowed_origins.unwrap()[0], + CorsOrigin("https://foo.test".to_string()) + ); + assert_eq!(cors_config.max_age_secs.unwrap(), 60); + } + + #[test] + fn dump_and_load_settings_to_file() { + let temp_file_path = tempfile::NamedTempFile::new().unwrap().into_temp_path(); + let settings = ServiceSettings::default(); + let file_path = temp_file_path.to_str().unwrap(); + dump_settings_to_file(file_path, &settings).unwrap(); + let loaded_settings = load_settings_from_file(file_path).unwrap(); + assert_eq!(settings, loaded_settings); + } + + #[test] + fn load_settings_from_cli() { + let settings: ServiceSettings = ServiceSettings::from_iter(&[ + "test", + "--address", + "127.0.0.1:3030", + "--cert-file", + "foo.bar", + "--priv-key-file", + "bar.foo", + "--db-url", + "database.sqlite3", + "--max-age-secs", + "60", + "--allowed-origins", + "https://foo.test;https://test.foo:5050", + "--log-output-path", + "./log.log", + "--log-level", + "error", + "--enable-api-tokens", + "--service-version", + "v0.2.0", + ]); + + assert_eq!( + settings.address, + SocketAddr::from_str("127.0.0.1:3030").unwrap() + ); + + assert!(settings.tls.is_loaded()); + assert!(settings.enable_api_tokens); + assert_eq!(settings.tls.cert_file.unwrap(), "foo.bar"); + assert_eq!(settings.tls.priv_key_file.unwrap(), "bar.foo"); + assert_eq!(settings.db_url, "database.sqlite3"); + assert_eq!(settings.cors.max_age_secs.unwrap(), 60); + assert_eq!(settings.service_version, "v0.2.0"); + let allowed_origins = settings.cors.allowed_origins.unwrap(); + assert_eq!(allowed_origins.len(), 2); + assert_eq!( + allowed_origins[0], 
+ CorsOrigin("https://foo.test".to_string()) + ); + assert_eq!( + settings.log.log_output_path.unwrap(), + std::path::PathBuf::from_str("./log.log").unwrap() + ); + assert_eq!(settings.log.log_level, Some(LogLevel::Error)); + } + + #[test] + fn load_settings_from_env() { + use std::env::set_var; + set_var(DATABASE_URL, "database.sqlite3"); + set_var(TLS_CERT_FILE, "foo.bar"); + set_var(TLS_PRIVATE_KEY_FILE, "bar.foo"); + set_var( + CORS_ALLOWED_ORIGINS, + "https://foo.test;https://test.foo:5050", + ); + set_var(VIT_SERVICE_VERSION_ENV_VARIABLE, "v0.2.0"); + + let settings: ServiceSettings = ServiceSettings::from_iter(&[ + "test", + "--address", + "127.0.0.1:3030", + "--max-age-secs", + "60", + ]); + + assert_eq!( + settings.address, + SocketAddr::from_str("127.0.0.1:3030").unwrap() + ); + + assert!(settings.tls.is_loaded()); + assert_eq!(settings.tls.cert_file.unwrap(), "foo.bar"); + assert_eq!(settings.tls.priv_key_file.unwrap(), "bar.foo"); + assert_eq!(settings.db_url, "database.sqlite3"); + assert_eq!(settings.cors.max_age_secs.unwrap(), 60); + assert_eq!(settings.service_version, "v0.2.0"); + let allowed_origins = settings.cors.allowed_origins.unwrap(); + assert_eq!(allowed_origins.len(), 2); + assert_eq!( + allowed_origins[0], + CorsOrigin("https://foo.test".to_string()) + ); + } + + #[test] + fn merge_settings() { + let default = ServiceSettings::default(); + let other_settings = ServiceSettings::from_iter(&[ + "test", + "--address", + "127.0.0.1:8080", + "--cert-file", + "foo.bar", + "--priv-key-file", + "bar.foo", + "--db-url", + "database.sqlite3", + "--max-age-secs", + "60", + "--allowed-origins", + "https://foo.test;https://test.foo:5050", + "--block0-path", + "block0.bin", + "--enable-api-tokens", + "--service-version", + "v0.2.0", + ]); + + let merged_settings = default.override_from(&other_settings); + assert_eq!(merged_settings, other_settings); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/default.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/default.rs new file mode 100644 index 0000000000..4dec7e7b4b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/default.rs @@ -0,0 +1,21 @@ +use super::config::ServiceSettings; +use super::config::{Cors, Log, Tls, ADDRESS_DEFAULT, BLOCK0_PATH_DEFAULT, DB_URL_DEFAULT}; +use std::net::SocketAddr; +use std::str::FromStr; + +impl Default for ServiceSettings { + fn default() -> Self { + Self { + in_settings_file: None, + out_settings_file: None, + address: SocketAddr::from_str(ADDRESS_DEFAULT).unwrap(), + tls: Tls::default(), + cors: Cors::default(), + db_url: DB_URL_DEFAULT.to_string(), + block0_path: BLOCK0_PATH_DEFAULT.to_string(), + enable_api_tokens: false, + log: Log::default(), + service_version: "".to_string(), + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/mod.rs new file mode 100644 index 0000000000..b27e01d11e --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/settings/mod.rs @@ -0,0 +1,7 @@ +mod config; +mod default; + +pub use config::{ + dump_settings_to_file, load_settings_from_file, Cors, CorsOrigin, LogLevel, ServiceSettings, + Tls, +}; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/signals.rs 
b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/signals.rs new file mode 100644 index 0000000000..e6e940f83c --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/server/signals.rs @@ -0,0 +1,25 @@ +use tokio::signal; + +#[cfg(target_os = "unix")] +pub async fn watch_signal_for_shutdown() { + let mut interrupt_signal = signal::unix::signal(signal::unix::SignalKind::interrupt()) + .expect("Error setting up interrupt signal"); + + let mut terminate_signal = signal::unix::signal(signal::unix::SignalKind::terminate()) + .expect("Error setting up terminate signal"); + + let mut quit_signal = signal::unix::signal(signal::unix::SignalKind::quit()) + .expect("Error setting up quit signal"); + + tokio::select! { + _ = signal::ctrl_c() => (), + _ = interrupt_stream.recv() => (), + _ = terminate_signal.recv() => (), + _ = quit_signal.recv() => (), + } +} + +#[cfg(not(target_os = "unix"))] +pub async fn watch_signal_for_shutdown() { + signal::ctrl_c().await.ok(); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/datetime.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/datetime.rs new file mode 100644 index 0000000000..eb22952bd9 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/datetime.rs @@ -0,0 +1,5 @@ +use time::OffsetDateTime; + +pub fn unix_timestamp_to_datetime(timestamp: i64) -> OffsetDateTime { + OffsetDateTime::from_unix_timestamp(timestamp).expect("invalid timestamp") +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/mod.rs new file mode 100644 index 0000000000..c52741d6dd --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/mod.rs @@ -0,0 +1,2 @@ +pub mod datetime; +pub mod serde; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/serde.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/serde.rs new file mode 100644 index 0000000000..28f73f1a5c --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/utils/serde.rs @@ -0,0 +1,145 @@ +use crate::db::models::vote_options::VoteOptions; +use crate::utils::datetime::unix_timestamp_to_datetime; +use serde::de::Visitor; +use serde::{ser::Error, Deserialize, Deserializer, Serializer}; +use std::fmt; +use time::{format_description::well_known::Rfc3339, OffsetDateTime}; + +// this warning should be disable here since the interface for this function requires +// the first argument to be passed by value +#[allow(clippy::trivially_copy_pass_by_ref)] +pub fn serialize_unix_timestamp_as_rfc3339( + timestamp: &i64, + serializer: S, +) -> Result { + let datetime = unix_timestamp_to_datetime(*timestamp); + serializer.serialize_str( + &datetime + .format(&Rfc3339) + .map_err(|e| S::Error::custom(format!("Could not serialize date: {}", e)))?, + ) +} + +pub fn deserialize_unix_timestamp_from_rfc3339<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + struct Rfc3339Deserializer(); + + impl<'de> Visitor<'de> for Rfc3339Deserializer { + type Value = OffsetDateTime; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("An rfc3339 compatible string is needed") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + OffsetDateTime::parse(value, &Rfc3339).map_err(|e| E::custom(format!("{}", 
e))) + } + } + + deserializer + .deserialize_str(Rfc3339Deserializer()) + .map(|datetime| datetime.unix_timestamp()) +} + +pub fn serialize_bin_as_str(data: &[u8], serializer: S) -> Result { + serializer.serialize_str(&String::from_utf8(data.to_vec()).unwrap()) +} + +pub fn deserialize_string_as_bytes<'de, D>(deserializer: D) -> Result, D::Error> +where + D: Deserializer<'de>, +{ + struct VecU8Deserializer(); + + impl<'de> Visitor<'de> for VecU8Deserializer { + type Value = Vec; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("A compatible utf8 string is needed") + } + + fn visit_str(self, value: &str) -> Result, E> + where + E: serde::de::Error, + { + let vec = value.as_bytes().to_vec(); + Ok(vec) + } + } + + deserializer.deserialize_str(VecU8Deserializer()) +} + +// this warning should be disable here since the interface for this function requires +// the first argument to be passed by value +#[allow(clippy::trivially_copy_pass_by_ref)] +pub fn serialize_i64_as_str(data: &i64, serializer: S) -> Result { + serializer.serialize_str(&data.to_string()) +} + +pub fn deserialize_i64_from_str<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + struct I64Deserializer(); + + impl<'de> Visitor<'de> for I64Deserializer { + type Value = i64; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a compatible i64 number or string with i64 format") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + value + .parse() + .map_err(|e| E::custom(format!("Error parsing {} to i64: {}", value, e))) + } + } + deserializer.deserialize_str(I64Deserializer()) +} + +pub fn deserialize_vote_options_from_string<'de, D>( + deserializer: D, +) -> Result +where + D: Deserializer<'de>, +{ + struct VoteOptionsDeserializer(); + + impl<'de> Visitor<'de> for VoteOptionsDeserializer { + type Value = VoteOptions; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("A coma separated values are needed") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + Ok(VoteOptions::parse_coma_separated_value(value)) + } + } + + deserializer.deserialize_str(VoteOptionsDeserializer()) +} + +pub fn deserialize_truthy_falsy<'de, D>(deserializer: D) -> Result +where + D: Deserializer<'de>, +{ + let truthy_value = <&str>::deserialize(deserializer)?; + Ok(matches!( + truthy_value.to_lowercase().as_ref(), + "x" | "1" | "true" + )) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/api_token.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/api_token.rs new file mode 100644 index 0000000000..f91ae8bd5b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/api_token.rs @@ -0,0 +1,167 @@ +use crate::db::{queries::api_tokens as api_tokens_queries, DbConnectionPool}; +use crate::v0::{context::SharedContext, errors::HandleError}; +use warp::{Filter, Rejection}; + +/// Header where token should be present in requests +pub const API_TOKEN_HEADER: &str = "API-Token"; + +/// API Token wrapper type +#[derive(PartialEq, Eq, Debug, Clone)] +pub struct ApiToken(Vec); + +/// API token manager is an abstraction on the API tokens for the service +/// The main idea is to keep the service agnostic of what kind of backend we are using such task. +/// Right now we rely on a SQLlite connection. 
But in the future it maybe be something else like a +/// REDIS, or some other hybrid system. +pub struct ApiTokenManager { + connection_pool: DbConnectionPool, +} + +impl From<&[u8]> for ApiToken { + fn from(data: &[u8]) -> Self { + Self(data.to_vec()) + } +} + +impl AsRef<[u8]> for ApiToken { + fn as_ref(&self) -> &[u8] { + self.0.as_slice() + } +} + +impl ApiToken { + pub fn new(data: Vec) -> Self { + Self(data) + } +} + +impl ApiTokenManager { + fn new(connection_pool: DbConnectionPool) -> Self { + Self { connection_pool } + } + + async fn is_token_valid(&self, token: ApiToken) -> Result { + match api_tokens_queries::query_token(token, &self.connection_pool).await { + Ok(Some(_)) => Ok(true), + Ok(None) => Ok(false), + Err(e) => Err(HandleError::InternalError(format!( + "Error retrieving token: {}", + e + ))), + } + } + + #[allow(dead_code)] + async fn revoke_token(&self, _token: ApiToken) -> Result<(), ()> { + Ok(()) + } +} + +async fn authorize_token(token: String, context: SharedContext) -> Result<(), Rejection> { + let manager = ApiTokenManager::new(context.read().await.db_connection_pool.clone()); + + let mut token_vec: Vec = Vec::new(); + base64::decode_config_buf(token.clone(), base64::URL_SAFE, &mut token_vec).map_err(|_err| { + warp::reject::custom(HandleError::InvalidHeader( + API_TOKEN_HEADER, + "header should be base64 url safe decodable", + )) + })?; + + let api_token = ApiToken(token_vec); + + match manager.is_token_valid(api_token).await { + Ok(true) => Ok(()), + Ok(false) => { + tracing::event!( + tracing::Level::INFO, + "Unauthorized token received: {}", + token + ); + Err(warp::reject::custom(HandleError::UnauthorizedToken)) + } + Err(e) => Err(warp::reject::custom(e)), + } +} + +/// A warp filter that checks authorization through API tokens. +/// The header `API_TOKEN_HEADER` should be present and valid otherwise the request is rejected. 
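// Hypothetical composition with a protected route (the route itself is illustrative; the
// `enable_api_tokens` gating done by the real service is omitted here). A missing or invalid
// token surfaces as a warp rejection carrying `HandleError`:
//
//     let token_filter = api_token_filter(shared_context.clone()).await;
//     let protected = warp::path!("admin" / "fund")
//         .and(token_filter)
//         .map(|| "authorized");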
+pub async fn api_token_filter( + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + warp::header::header(API_TOKEN_HEADER) + .and(with_context) + .and_then(authorize_token) + .and(warp::any()) + .untuple_one() +} + +#[cfg(test)] +mod test { + use crate::db::{ + migrations as db_testing, models::api_tokens as api_token_model, + models::api_tokens::ApiTokenData, schema::api_tokens, DbConnectionPool, + }; + use crate::v0::api_token::{api_token_filter, ApiToken, API_TOKEN_HEADER}; + use crate::v0::context::test::new_in_memmory_db_test_shared_context; + use diesel::{ExpressionMethods, RunQueryDsl}; + use time::OffsetDateTime; + + pub fn get_testing_token() -> (api_token_model::ApiTokenData, String) { + let data = b"ffffffffffffffffffffffffffffffff".to_vec(); + let token_data = ApiTokenData { + token: ApiToken(data.clone()), + creation_time: OffsetDateTime::now_utc().unix_timestamp(), + expire_time: OffsetDateTime::now_utc().unix_timestamp(), + }; + ( + token_data, + base64::encode_config(data, base64::URL_SAFE_NO_PAD), + ) + } + + pub fn insert_token_to_db(token: ApiTokenData, db: &DbConnectionPool) { + let conn = db.get().unwrap(); + let values = ( + api_tokens::dsl::token.eq(token.token.0.clone()), + api_tokens::dsl::creation_time.eq(token.creation_time), + api_tokens::dsl::expire_time.eq(token.expire_time), + ); + diesel::insert_into(api_tokens::table) + .values(values) + .execute(&conn) + .unwrap(); + } + + #[tokio::test] + async fn api_token_filter_reject() { + let shared_context = new_in_memmory_db_test_shared_context(); + let filter = api_token_filter(shared_context).await; + + assert!(warp::test::request() + .header(API_TOKEN_HEADER, "foobar") + .filter(&filter) + .await + .is_err()); + } + + #[tokio::test] + async fn api_token_filter_accepted() { + let shared_context = new_in_memmory_db_test_shared_context(); + + // initialize db + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + let (token, base64_token) = get_testing_token(); + insert_token_to_db(token, pool); + + let filter = api_token_filter(shared_context.clone()).await; + + assert!(warp::test::request() + .header(API_TOKEN_HEADER, base64_token) + .filter(&filter) + .await + .is_ok()); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/context.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/context.rs new file mode 100644 index 0000000000..97265c6658 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/context.rs @@ -0,0 +1,74 @@ +use crate::db; +use std::sync::Arc; +use tokio::sync::RwLock; + +pub type SharedContext = Arc>; + +#[derive(Clone)] +pub struct Context { + pub db_connection_pool: db::DbConnectionPool, + pub block0_path: String, + pub block0: Vec, + pub versioning: String, +} + +impl Context { + pub fn new( + db_connection_pool: db::DbConnectionPool, + block0_path: &str, + block0: Vec, + versioning: String, + ) -> Self { + Self { + db_connection_pool, + block0_path: block0_path.to_string(), + block0, + versioning, + } + } +} + +pub fn new_shared_context( + db_connection_pool: db::DbConnectionPool, + block0_path: &str, + versioning: &str, +) -> SharedContext { + let block0 = std::fs::read(block0_path).unwrap_or_default(); + let context = Context::new( + db_connection_pool, + block0_path, + block0, + versioning.to_string(), + ); + Arc::new(RwLock::new(context)) +} + +#[cfg(test)] +pub 
mod test { + use rand::{distributions::Alphanumeric, thread_rng, Rng}; + + use super::*; + use crate::db; + + pub fn new_in_memmory_db_test_shared_context() -> SharedContext { + let name: String = thread_rng() + .sample_iter(Alphanumeric) + .take(5) + .map(char::from) + .collect(); + let db_url = format!("file:{}?mode=memory&cache=shared", name); + let pool = db::load_db_connection_pool(&db_url).unwrap(); + let block0: Vec = vec![1, 2, 3, 4, 5]; + Arc::new(RwLock::new(Context::new( + pool, + "", + block0, + "2.0".to_string(), + ))) + } + + pub fn new_test_shared_context(db_url: &str, block0_path: &str) -> SharedContext { + let pool = db::load_db_connection_pool(db_url).unwrap(); + new_shared_context(pool, block0_path, "2.0") + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/handlers.rs new file mode 100644 index 0000000000..6bfc1e6b33 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/handlers.rs @@ -0,0 +1,12 @@ +use super::logic; +use crate::v0::{context::SharedContext, result::HandlerResult}; +use warp::{Rejection, Reply}; + +pub async fn get_reviews_with_proposal_id( + id: i32, + context: SharedContext, +) -> Result { + Ok(HandlerResult( + logic::get_advisor_reviews_with_proposal_id(id, context).await, + )) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/logic.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/logic.rs new file mode 100644 index 0000000000..f18972ea51 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/logic.rs @@ -0,0 +1,25 @@ +use crate::db::{ + models::community_advisors_reviews::AdvisorReview, + queries::community_advisors_reviews as advisor_reviews_queries, +}; +use crate::v0::context::SharedContext; +use crate::v0::endpoints::advisor_reviews::schemas::GroupedReviews; +use crate::v0::errors::HandleError; +use std::collections::HashMap; + +pub async fn get_advisor_reviews_with_proposal_id( + id: i32, + context: SharedContext, +) -> Result { + let pool = &context.read().await.db_connection_pool; + let reviews = advisor_reviews_queries::query_reviews_by_fund_id(id, pool).await?; + Ok(group_reviews_by_assessor(reviews)) +} + +fn group_reviews_by_assessor(reviews: Vec) -> GroupedReviews { + let mut map: HashMap> = HashMap::new(); + for review in reviews { + map.entry(review.assessor.clone()).or_default().push(review); + } + GroupedReviews(map) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/mod.rs new file mode 100644 index 0000000000..63e82d1d35 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/mod.rs @@ -0,0 +1,6 @@ +mod handlers; +mod logic; +mod routes; +mod schemas; + +pub use routes::filter; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/routes.rs new file mode 100644 index 0000000000..cc67254984 --- /dev/null +++ 
@@ -0,0 +1,18 @@
+use super::handlers::*;
+use crate::v0::context::SharedContext;
+use warp::filters::BoxedFilter;
+use warp::{Filter, Rejection, Reply};
+
+pub async fn filter(
+    root: BoxedFilter<()>,
+    context: SharedContext,
+) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
+    let with_context = warp::any().map(move || context.clone());
+
+    let reviews = warp::path!(i32)
+        .and(warp::get())
+        .and(with_context)
+        .and_then(get_reviews_with_proposal_id);
+
+    root.and(reviews)
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/schemas.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/schemas.rs
new file mode 100644
index 0000000000..5007c9d618
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/advisor_reviews/schemas.rs
@@ -0,0 +1,8 @@
+use crate::db::models::community_advisors_reviews::AdvisorReview;
+
+use serde::{Deserialize, Serialize};
+
+use std::collections::HashMap;
+
+#[derive(Serialize, Deserialize)]
+pub struct GroupedReviews(pub HashMap<String, Vec<AdvisorReview>>);
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/handlers.rs
new file mode 100644
index 0000000000..caaeb8f51b
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/handlers.rs
@@ -0,0 +1,11 @@
+use super::logic;
+use crate::v0::{context::SharedContext, result::HandlerResult};
+use warp::{Rejection, Reply};
+
+pub async fn get_challenges(context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::get_all_challenges(context).await))
+}
+
+pub async fn get_challenge_by_id(id: i32, context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::get_challenge_by_id(id, context).await))
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/logic.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/logic.rs
new file mode 100644
index 0000000000..e7f857fb0f
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/logic.rs
@@ -0,0 +1,22 @@
+use super::schemas::ChallengeWithProposals;
+use crate::db::{models::challenges::Challenge, queries::challenges as challenges_queries};
+use crate::v0::context::SharedContext;
+use crate::v0::errors::HandleError;
+
+pub async fn get_all_challenges(context: SharedContext) -> Result<Vec<Challenge>, HandleError> {
+    let pool = &context.read().await.db_connection_pool;
+    challenges_queries::query_all_challenges(pool).await
+}
+
+pub async fn get_challenge_by_id(
+    id: i32,
+    context: SharedContext,
+) -> Result<ChallengeWithProposals, HandleError> {
+    let pool = &context.read().await.db_connection_pool;
+    let challenge = challenges_queries::query_challenge_by_id(id, pool).await?;
+    let proposals = challenges_queries::query_challenge_proposals_by_id(id, pool).await?;
+    Ok(ChallengeWithProposals {
+        challenge,
+        proposals,
+    })
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/mod.rs
new file mode 100644
index 0000000000..63e82d1d35
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/mod.rs
@@ -0,0 +1,6 @@
+mod handlers;
+mod logic;
+mod routes;
+mod schemas;
+
+pub use routes::filter;
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/routes.rs
new file mode 100644
index 0000000000..4afa0fcabb
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/routes.rs
@@ -0,0 +1,23 @@
+use super::handlers::*;
+use crate::v0::context::SharedContext;
+use warp::filters::BoxedFilter;
+use warp::{Filter, Rejection, Reply};
+
+pub async fn filter(
+    root: BoxedFilter<()>,
+    context: SharedContext,
+) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
+    let with_context = warp::any().map(move || context.clone());
+
+    let challenges = warp::path::end()
+        .and(warp::get())
+        .and(with_context.clone())
+        .and_then(get_challenges);
+
+    let challenge_by_id = warp::path!(i32)
+        .and(warp::get())
+        .and(with_context)
+        .and_then(get_challenge_by_id);
+
+    root.and(challenge_by_id.or(challenges))
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/schemas.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/schemas.rs
new file mode 100644
index 0000000000..d6f9358ad9
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/challenges/schemas.rs
@@ -0,0 +1,9 @@
+use crate::db::models::{challenges::Challenge, proposals::Proposal};
+use serde::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize)]
+pub struct ChallengeWithProposals {
+    #[serde(flatten)]
+    pub challenge: Challenge,
+    pub proposals: Vec<Proposal>,
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/handlers.rs
new file mode 100644
index 0000000000..27b782fd4b
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/handlers.rs
@@ -0,0 +1,218 @@
+use super::logic;
+use crate::v0::result::HandlerResult;
+use crate::{db::models::funds::Fund, v0::context::SharedContext};
+use warp::{Rejection, Reply};
+
+pub async fn get_fund_by_id(id: i32, context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::get_fund_by_id(id, context).await))
+}
+
+pub async fn get_fund(context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::get_current_fund(context).await))
+}
+
+pub async fn get_all_funds(context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::get_all_funds(context).await))
+}
+
+pub async fn put_fund(fund: Fund, context: SharedContext) -> Result<impl Reply, Rejection> {
+    Ok(HandlerResult(logic::put_fund(fund, context).await))
+}
+
+#[cfg(test)]
+pub mod test {
+    use super::*;
+    use crate::db::{
+        migrations as db_testing,
+        models::funds::{test as funds_testing, Fund},
+        queries::funds::FundWithNext,
+    };
+    use crate::v0::context::test::new_in_memmory_db_test_shared_context;
+    use warp::Filter;
+
+    #[tokio::test]
+    async fn get_fund_handler() {
+        // build context
+        let shared_context = new_in_memmory_db_test_shared_context();
+        let filter_context = shared_context.clone();
+        let with_context = warp::any().map(move || filter_context.clone());
+
+        // initialize db
+        let pool = &shared_context.read().await.db_connection_pool;
+        db_testing::initialize_db_with_migration(&pool.get().unwrap());
+        let fund: Fund = funds_testing::get_test_fund(Some(1));
+        let mut next_fund: Fund =
funds_testing::get_test_fund(Some(2)); + + next_fund.challenges = Vec::new(); + next_fund.chain_vote_plans = Vec::new(); + + funds_testing::populate_db_with_fund(&fund, pool); + funds_testing::populate_db_with_fund(&next_fund, pool); + + // build filter + let filter = warp::any() + .and(warp::get()) + .and(with_context) + .and_then(get_fund); + + let result = warp::test::request().method("GET").reply(&filter).await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + let result_fund: FundWithNext = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(fund, result_fund.fund); + + let next = result_fund.next.unwrap(); + assert_eq!(next_fund.id, next.id); + assert_eq!(next_fund.fund_name, next.fund_name); + assert_eq!(next_fund.stage_dates, next.stage_dates); + } + + #[tokio::test] + async fn get_fund_by_id_handler() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // initialize db + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + let fund: Fund = funds_testing::get_test_fund(None); + funds_testing::populate_db_with_fund(&fund, pool); + + // build filter + let filter = warp::path!(i32) + .and(warp::get()) + .and(with_context) + .and_then(get_fund_by_id); + + let result = warp::test::request() + .method("GET") + .path(&format!("/{}", fund.id)) + .reply(&filter) + .await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + let result_fund: Fund = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(fund, result_fund); + } + + #[tokio::test] + async fn get_all_funds_handler() { + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + + let fund1: Fund = funds_testing::get_test_fund(Some(1)); + let mut fund2: Fund = funds_testing::get_test_fund(Some(2)); + + fund2.challenges = vec![]; + fund2.chain_vote_plans = vec![]; + + funds_testing::populate_db_with_fund(&fund1, pool); + funds_testing::populate_db_with_fund(&fund2, pool); + + let filter = warp::any() + .and(warp::get()) + .and(with_context) + .and_then(get_all_funds); + + let result = warp::test::request().method("GET").reply(&filter).await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + let result_funds: Vec = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + + assert_eq!(vec![fund1.id, fund2.id], result_funds); + } + + #[tokio::test] + async fn put_fund_handler() { + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + + let fund1: Fund = funds_testing::get_test_fund(Some(1)); + let mut fund2: Fund = funds_testing::get_test_fund(Some(2)); + let mut fund3: Fund = funds_testing::get_test_fund(Some(3)); + + fund2.challenges = vec![]; + fund2.chain_vote_plans = vec![]; + fund2.goals = vec![]; + + fund3.challenges = vec![]; + fund3.chain_vote_plans 
= vec![]; + fund3.goals = vec![]; + + funds_testing::populate_db_with_fund(&fund1, pool); + funds_testing::populate_db_with_fund(&fund2, pool); + + let filter = warp::any() + .and(warp::put()) + .and(warp::body::json()) + .and(with_context.clone()) + .and_then(put_fund); + + let mut updated_fund = fund2.clone(); + updated_fund.fund_name = "modified fund name".into(); + + let result = warp::test::request() + .method("PUT") + .body(serde_json::to_string(&updated_fund).unwrap()) + .reply(&filter) + .await; + + assert_eq!(result.status(), warp::http::StatusCode::OK); + + let result_fund = test_get_fund(fund2.id, shared_context.clone()).await; + assert_eq!(updated_fund, result_fund); + + let result_fund = test_get_fund(fund1.id, shared_context.clone()).await; + assert_eq!(fund1, result_fund); + + assert_eq!( + warp::test::request() + .method("PUT") + .body(serde_json::to_string(&fund3).unwrap()) + .reply(&filter) + .await + .status(), + warp::http::StatusCode::OK + ); + + let result_fund = test_get_fund(fund3.id, shared_context.clone()).await; + assert_eq!(fund3, result_fund); + + let result_fund = test_get_fund(fund2.id, shared_context.clone()).await; + assert_eq!(updated_fund.clone(), result_fund.clone()); + // just to be extra sure + assert_ne!(fund2, updated_fund); + + let result_fund = test_get_fund(fund1.id, shared_context.clone()).await; + assert_eq!(fund1, result_fund); + } + + async fn test_get_fund(id: i32, context: SharedContext) -> Fund { + let with_context = warp::any().map(move || context.clone()); + + let get_filter = warp::path!(i32) + .and(warp::get()) + .and(with_context) + .and_then(get_fund_by_id); + + let result = warp::test::request() + .method("GET") + .path(&format!("/{}", id)) + .reply(&get_filter) + .await; + + assert_eq!(result.status(), warp::http::StatusCode::OK); + + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap() + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/logic.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/logic.rs new file mode 100644 index 0000000000..651fa9dac8 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/logic.rs @@ -0,0 +1,24 @@ +use crate::db::queries::funds::FundWithNext; +use crate::db::{models::funds::Fund, queries::funds as funds_queries}; +use crate::v0::context::SharedContext; +use crate::v0::errors::HandleError; + +pub async fn get_fund_by_id(id: i32, context: SharedContext) -> Result { + let pool = &context.read().await.db_connection_pool; + funds_queries::query_fund_by_id(id, pool).await +} + +pub async fn get_current_fund(context: SharedContext) -> Result { + let pool = &context.read().await.db_connection_pool; + funds_queries::query_current_fund(pool).await +} + +pub async fn get_all_funds(context: SharedContext) -> Result, HandleError> { + let pool = &context.read().await.db_connection_pool; + funds_queries::query_all_funds(pool).await +} + +pub async fn put_fund(fund: Fund, context: SharedContext) -> Result<(), HandleError> { + let pool = &context.read().await.db_connection_pool; + funds_queries::put_fund(fund, pool).await +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/mod.rs new file mode 100644 index 0000000000..ababeebab2 --- /dev/null +++ 
b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/mod.rs @@ -0,0 +1,5 @@ +mod handlers; +mod logic; +mod routes; + +pub use routes::{admin_filter, filter}; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/routes.rs new file mode 100644 index 0000000000..d91c536e27 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/funds/routes.rs @@ -0,0 +1,44 @@ +use super::handlers::*; +use crate::v0::context::SharedContext; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub async fn filter( + root: BoxedFilter<()>, + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + let fund = warp::path::end() + .and(warp::get()) + .and(with_context.clone()) + .and_then(get_fund) + .boxed(); + + let fund_by_id = warp::path!(i32) + .and(warp::get()) + .and(with_context.clone()) + .and_then(get_fund_by_id) + .boxed(); + + let all_funds = warp::path::end() + .and(warp::get()) + .and(with_context) + .and_then(get_all_funds) + .boxed(); + + // fund_by_id need to be checked first otherwise requests are swallowed by the fund::any + root.and(fund_by_id.or(fund).or(all_funds)).boxed() +} + +pub fn admin_filter( + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + warp::path::end() + .and(warp::put()) + .and(warp::body::json()) + .and(with_context) + .and_then(put_fund) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/handlers.rs new file mode 100644 index 0000000000..66656a9b01 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/handlers.rs @@ -0,0 +1,60 @@ +use crate::v0::context::SharedContext; +use warp::{http::Response, Rejection, Reply}; + +pub async fn get_genesis(context: SharedContext) -> Result { + let mut response: Vec = context.read().await.block0.clone(); + + // check if block0 is not loaded and try to load it again + if response.is_empty() { + let block0_path = context.read().await.block0_path.clone(); + response = tokio::fs::read(block0_path).await.unwrap_or_default(); + if !response.is_empty() { + context.write().await.block0 = response.clone(); + } + } + + // if we have no block0 + if response.is_empty() { + Ok(Response::builder() + .status(warp::http::status::StatusCode::NO_CONTENT) + .header("Content-Type", "application/octet-stream") + .body(response) + .unwrap()) + // if we have a block0 + } else { + Ok(Response::builder() + .header("Content-Type", "application/octet-stream") + .body(response) + .unwrap()) + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::v0::context::test::new_test_shared_context; + use warp::Filter; + + #[tokio::test] + async fn get_block0_succeed() { + // build context + let block0_path = "../resources/tests/block0.bin"; + let shared_context = new_test_shared_context("", block0_path); + let block0 = std::fs::read(block0_path).unwrap(); + + let with_context = warp::any().map(move || shared_context.clone()); + + // build filter + let filter = warp::any() + .and(warp::get()) + .and(with_context) + .and_then(get_genesis); + + // check status code and block0 data + let result = 
warp::test::request().method("GET").reply(&filter).await; + + assert_eq!(result.status(), warp::http::StatusCode::OK); + let body = result.body().to_vec(); + assert_eq!(block0, body); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/mod.rs new file mode 100644 index 0000000000..2a6c75df57 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/mod.rs @@ -0,0 +1,4 @@ +mod handlers; +mod routes; + +pub use routes::filter; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/routes.rs new file mode 100644 index 0000000000..7a2be680b9 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/genesis/routes.rs @@ -0,0 +1,19 @@ +use super::handlers::get_genesis; +use crate::v0::context::SharedContext; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub fn filter( + root: BoxedFilter<()>, + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + let block0 = warp::path::end() + .and(warp::get()) + .and(with_context) + .and_then(get_genesis) + .boxed(); + + root.and(block0).boxed() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/handlers.rs new file mode 100644 index 0000000000..b0e5c60a22 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/handlers.rs @@ -0,0 +1,35 @@ +use crate::v0::context::SharedContext; +use warp::{Rejection, Reply}; + +pub async fn check_health(_context: SharedContext) -> Result { + Ok(warp::reply()) +} + +#[cfg(test)] +pub mod test { + use super::*; + use crate::v0::context::test::new_in_memmory_db_test_shared_context; + use warp::Filter; + + #[tokio::test] + async fn get_proposal_by_id_handler() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // build filter + let filter = warp::path!("health" / ..) 
+ .and(warp::get()) + .and(with_context) + .and_then(check_health); + + let result = warp::test::request() + .method("GET") + .path("/health") + .reply(&filter) + .await; + + assert_eq!(result.status(), warp::http::StatusCode::OK); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/mod.rs new file mode 100644 index 0000000000..2a6c75df57 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/mod.rs @@ -0,0 +1,4 @@ +mod handlers; +mod routes; + +pub use routes::filter; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/routes.rs new file mode 100644 index 0000000000..7953f20b97 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/health/routes.rs @@ -0,0 +1,19 @@ +use super::handlers::*; +use crate::v0::context::SharedContext; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub async fn filter( + root: BoxedFilter<()>, + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + let health_filter = warp::path::end() + .and(warp::get()) + .and(with_context) + .and_then(check_health) + .boxed(); + + root.and(health_filter).boxed() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/mod.rs new file mode 100644 index 0000000000..d27052b270 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/mod.rs @@ -0,0 +1,69 @@ +mod advisor_reviews; +mod challenges; +mod funds; +mod genesis; +mod health; +pub mod proposals; +pub mod service_version; + +use crate::v0::context::SharedContext; + +use crate::v0::api_token; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub async fn filter( + root: BoxedFilter<()>, + context: SharedContext, + enable_api_tokens: bool, +) -> impl Filter + Clone { + // mount health endpoint + let health_root = warp::path!("health" / ..); + let health_filter = health::filter(health_root.boxed(), context.clone()).await; + + // mount chain-data endpoint + let chain_data_root = warp::path!("proposals" / ..); + let chain_data_filter = proposals::filter(chain_data_root.boxed(), context.clone()).await; + + // mount funds endpoint + let funds_root = warp::path!("fund" / ..); + let funds_filter = funds::filter(funds_root.boxed(), context.clone()).await; + + // mount challenges endpoint + let challenges_root = warp::path!("challenges" / ..); + let challenges_filter = challenges::filter(challenges_root.boxed(), context.clone()).await; + + // mount genesis endpoint + let genesis_root = warp::path!("block0" / ..); + let genesis_filter = genesis::filter(genesis_root.boxed(), context.clone()); + + let reviews_root = warp::path!("reviews" / ..); + let reviews_filter = advisor_reviews::filter(reviews_root.boxed(), context.clone()).await; + + let admin_filter = { + let base = warp::path!("admin" / ..); + + let fund_filter = warp::path!("fund" / ..).and(funds::admin_filter(context.clone())); + + base.and(fund_filter) + }; + + let api_token_filter = if enable_api_tokens { + api_token::api_token_filter(context).await.boxed() + } else { + 
warp::any().boxed() + }; + + root.and( + api_token_filter.and( + health_filter + .or(genesis_filter) + .or(chain_data_filter) + .or(funds_filter) + .or(challenges_filter) + .or(reviews_filter) + .or(admin_filter), + ), + ) + .boxed() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/handlers.rs new file mode 100644 index 0000000000..1ba1331589 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/handlers.rs @@ -0,0 +1,143 @@ +use super::logic; +use crate::v0::endpoints::proposals::requests::ProposalsByVoteplanIdAndIndex; +use crate::v0::{context::SharedContext, result::HandlerResult}; +use warp::{Rejection, Reply}; + +pub async fn get_proposal(id: i32, context: SharedContext) -> Result { + Ok(HandlerResult(logic::get_proposal(id, context).await)) +} + +pub async fn get_all_proposals(context: SharedContext) -> Result { + Ok(HandlerResult(logic::get_all_proposals(context).await)) +} + +pub async fn get_proposals_by_voteplan_id_and_index( + body: ProposalsByVoteplanIdAndIndex, + context: SharedContext, +) -> Result { + Ok(HandlerResult( + logic::get_proposals_by_voteplan_id_and_index(body, context).await, + )) +} + +#[cfg(test)] +pub mod test { + use super::*; + use crate::db::{ + migrations as db_testing, + models::{ + challenges::{test as challenges_testing, Challenge}, + community_advisors_reviews::test as reviews_testing, + proposals::{test as proposals_testing, *}, + }, + }; + use crate::v0::context::test::new_in_memmory_db_test_shared_context; + use crate::v0::endpoints::proposals::requests::ProposalVoteplanIdAndIndexes; + use warp::Filter; + + #[tokio::test] + async fn get_proposal_by_id_handler() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // initialize db + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + let mut proposal: FullProposalInfo = proposals_testing::get_test_proposal(); + proposals_testing::populate_db_with_proposal(&proposal, pool); + let challenge: Challenge = + challenges_testing::get_test_challenge_with_fund_id(proposal.proposal.fund_id); + challenges_testing::populate_db_with_challenge(&challenge, pool); + + let review = reviews_testing::get_test_advisor_review_with_proposal_id( + proposal.proposal.proposal_id.parse().unwrap(), + ); + reviews_testing::populate_db_with_advisor_review(&review, pool); + proposal.proposal.reviews_count = 1; + // build filter + let filter = warp::path!(i32) + .and(warp::get()) + .and(with_context) + .and_then(get_proposal); + + let result = warp::test::request() + .method("GET") + .path("/1") + .reply(&filter) + .await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + println!("{}", String::from_utf8(result.body().to_vec()).unwrap()); + let result_proposal: FullProposalInfo = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(proposal, result_proposal); + } + + #[tokio::test] + async fn get_all_proposals_handler() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // initialize db + let 
pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + let proposal: FullProposalInfo = proposals_testing::get_test_proposal(); + proposals_testing::populate_db_with_proposal(&proposal, pool); + let challenge: Challenge = + challenges_testing::get_test_challenge_with_fund_id(proposal.proposal.fund_id); + challenges_testing::populate_db_with_challenge(&challenge, pool); + // build filter + let filter = warp::any() + .and(warp::get()) + .and(with_context) + .and_then(get_all_proposals); + + let result = warp::test::request().method("GET").reply(&filter).await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + let result_proposals: Vec = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(vec![proposal], result_proposals); + } + + #[tokio::test] + async fn get_proposal_by_voteplan_id_and_index() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // initialize db + let pool = &shared_context.read().await.db_connection_pool; + db_testing::initialize_db_with_migration(&pool.get().unwrap()); + let proposal: FullProposalInfo = proposals_testing::get_test_proposal(); + proposals_testing::populate_db_with_proposal(&proposal, pool); + let challenge: Challenge = + challenges_testing::get_test_challenge_with_fund_id(proposal.proposal.fund_id); + challenges_testing::populate_db_with_challenge(&challenge, pool); + // build filter + let filter = warp::any() + .and(warp::post()) + .and(warp::body::json()) + .and(with_context) + .and_then(get_proposals_by_voteplan_id_and_index); + + let request = ProposalVoteplanIdAndIndexes { + vote_plan_id: proposal.proposal.chain_voteplan_id.clone(), + indexes: vec![proposal.proposal.chain_proposal_index], + }; + + let result = warp::test::request() + .method("POST") + .json(&vec![request]) + .reply(&filter) + .await; + + assert_eq!(result.status(), warp::http::StatusCode::OK); + let result_proposals: Vec = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(vec![proposal], result_proposals); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/logic.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/logic.rs new file mode 100644 index 0000000000..a6fd5f6467 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/logic.rs @@ -0,0 +1,51 @@ +use crate::db::{models::proposals::FullProposalInfo, queries::proposals as proposals_queries}; +use crate::v0::endpoints::proposals::requests::ProposalsByVoteplanIdAndIndex; +use crate::v0::{context::SharedContext, errors::HandleError}; + +pub async fn get_all_proposals( + context: SharedContext, +) -> Result, HandleError> { + let pool = &context.read().await.db_connection_pool; + proposals_queries::query_all_proposals(pool).await +} + +pub async fn get_proposal( + id: i32, + context: SharedContext, +) -> Result { + let pool = &context.read().await.db_connection_pool; + proposals_queries::query_proposal_by_id(id, pool).await +} + +pub async fn get_proposals_by_voteplan_id_and_index( + query_data: ProposalsByVoteplanIdAndIndex, + context: SharedContext, +) -> Result, HandleError> { + let pool = context.read().await.db_connection_pool.clone(); + let tasks: Vec<_> = 
query_data + .into_iter() + .map(|proposal_query| { + tokio::spawn( + proposals_queries::query_proposals_by_voteplan_id_and_indexes( + proposal_query.vote_plan_id, + proposal_query.indexes, + pool.clone(), + ), + ) + }) + .collect(); + let mut results = Vec::new(); + + for task in tasks { + results.push( + task.await.map_err(|e| { + HandleError::InternalError(format!("Error executing task: {:?}", e)) + })??, + ); + } + + Ok(results + .into_iter() + .flat_map(IntoIterator::into_iter) + .collect()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/mod.rs new file mode 100644 index 0000000000..44c76efc80 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/mod.rs @@ -0,0 +1,7 @@ +mod handlers; +mod logic; +mod requests; +mod routes; + +pub use requests::{ProposalVoteplanIdAndIndexes, ProposalsByVoteplanIdAndIndex}; +pub use routes::filter; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/requests.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/requests.rs new file mode 100644 index 0000000000..0159ec3400 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/requests.rs @@ -0,0 +1,9 @@ +use serde::{Deserialize, Serialize}; + +#[derive(Serialize, Deserialize, Debug)] +pub struct ProposalVoteplanIdAndIndexes { + pub vote_plan_id: String, + pub indexes: Vec, +} + +pub type ProposalsByVoteplanIdAndIndex = Vec; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/routes.rs new file mode 100644 index 0000000000..5f128171b0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/proposals/routes.rs @@ -0,0 +1,32 @@ +use super::handlers::*; +use crate::v0::context::SharedContext; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub async fn filter( + root: BoxedFilter<()>, + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + let from_id = warp::path!(i32) + .and(warp::get()) + .and(with_context.clone()) + .and_then(get_proposal) + .boxed(); + + let proposals = warp::path::end() + .and(warp::get()) + .and(with_context.clone()) + .and_then(get_all_proposals) + .boxed(); + + let from_voteplan_id_and_indexes = warp::path::end() + .and(warp::post()) + .and(warp::body::json()) + .and(with_context) + .and_then(get_proposals_by_voteplan_id_and_index); + + root.and(from_id.or(proposals).or(from_voteplan_id_and_indexes)) + .boxed() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/handlers.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/handlers.rs new file mode 100644 index 0000000000..a7e20a1a31 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/handlers.rs @@ -0,0 +1,37 @@ +use super::logic; +use crate::v0::{context::SharedContext, result::HandlerResult}; +use warp::{Rejection, Reply}; + +pub async fn service_version(context: SharedContext) -> Result { + Ok(HandlerResult(logic::service_version(context).await)) +} + 
+#[cfg(test)] +pub mod test { + use super::*; + + use crate::v0::context::test::new_in_memmory_db_test_shared_context; + use crate::v0::endpoints::service_version::schemas::ServiceVersion; + use warp::Filter; + + #[tokio::test] + async fn get_proposal_by_id_handler() { + // build context + let shared_context = new_in_memmory_db_test_shared_context(); + let filter_context = shared_context.clone(); + let with_context = warp::any().map(move || filter_context.clone()); + + // build filter + let filter = warp::path::end() + .and(warp::get()) + .and(with_context) + .and_then(service_version); + + let result = warp::test::request().method("GET").reply(&filter).await; + assert_eq!(result.status(), warp::http::StatusCode::OK); + println!("{}", String::from_utf8(result.body().to_vec()).unwrap()); + let service_version_result: ServiceVersion = + serde_json::from_str(&String::from_utf8(result.body().to_vec()).unwrap()).unwrap(); + assert_eq!(service_version_result.service_version, "2.0".to_string()); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/logic.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/logic.rs new file mode 100644 index 0000000000..baf3fcd8f8 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/logic.rs @@ -0,0 +1,6 @@ +use super::schemas::ServiceVersion; +use crate::v0::{context::SharedContext, errors::HandleError}; +pub async fn service_version(context: SharedContext) -> Result { + let service_version = context.read().await.versioning.clone(); + Ok(ServiceVersion { service_version }) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/mod.rs new file mode 100644 index 0000000000..a7f7da0564 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/mod.rs @@ -0,0 +1,7 @@ +mod handlers; +mod logic; +mod routes; +pub mod schemas; + +pub use routes::filter; +pub use schemas::ServiceVersion; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/routes.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/routes.rs new file mode 100644 index 0000000000..c718d5048f --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/routes.rs @@ -0,0 +1,19 @@ +use super::handlers::service_version; +use crate::v0::context::SharedContext; +use warp::filters::BoxedFilter; +use warp::{Filter, Rejection, Reply}; + +pub async fn filter( + root: BoxedFilter<()>, + context: SharedContext, +) -> impl Filter + Clone { + let with_context = warp::any().map(move || context.clone()); + + let service_version_filter = warp::path::end() + .and(warp::get()) + .and(with_context) + .and_then(service_version) + .boxed(); + + root.and(service_version_filter).boxed() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/schemas.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/schemas.rs new file mode 100644 index 0000000000..ee9b9a165a --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/endpoints/service_version/schemas.rs @@ -0,0 +1,6 @@ +use 
serde::{Deserialize, Serialize};
+
+#[derive(Serialize, Deserialize)]
+pub struct ServiceVersion {
+    pub service_version: String,
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/errors.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/errors.rs
new file mode 100644
index 0000000000..381e7015e5
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/errors.rs
@@ -0,0 +1,61 @@
+use thiserror::Error;
+use warp::{reply::Response, Rejection, Reply};
+
+#[derive(Error, Debug)]
+pub enum HandleError {
+    #[error("The data requested data for `{0}` is not available")]
+    NotFound(String),
+
+    #[error("Internal error")]
+    DatabaseError(#[from] diesel::r2d2::PoolError),
+
+    #[error("Unauthorized token")]
+    UnauthorizedToken,
+
+    #[error("Internal error, cause: {0}")]
+    InternalError(String),
+
+    #[error("Invalid header {0}, cause: {1}")]
+    InvalidHeader(&'static str, &'static str),
+}
+
+impl HandleError {
+    fn to_status_code(&self) -> warp::http::StatusCode {
+        match self {
+            HandleError::NotFound(_) => warp::http::StatusCode::NOT_FOUND,
+            HandleError::DatabaseError(_) => warp::http::StatusCode::SERVICE_UNAVAILABLE,
+            HandleError::InternalError(_) => warp::http::StatusCode::INTERNAL_SERVER_ERROR,
+            HandleError::UnauthorizedToken => warp::http::StatusCode::UNAUTHORIZED,
+            HandleError::InvalidHeader(_, _) => warp::http::StatusCode::BAD_REQUEST,
+        }
+    }
+
+    fn to_message(&self) -> String {
+        format!("{}", self)
+    }
+
+    fn to_response(&self) -> Response {
+        let status_code = self.to_status_code();
+        warp::reply::with_status(warp::reply::json(&self.to_json()), status_code).into_response()
+    }
+
+    fn to_json(&self) -> serde_json::Value {
+        serde_json::json!({"code": self.to_status_code().as_u16(), "message" : self.to_message()})
+    }
+}
+
+impl warp::Reply for HandleError {
+    fn into_response(self) -> Response {
+        self.to_response()
+    }
+}
+
+impl warp::reject::Reject for HandleError {}
+
+pub async fn handle_rejection(err: Rejection) -> Result<impl Reply, Rejection> {
+    if let Some(handle_error) = err.find::<HandleError>() {
+        return Ok(handle_error.to_response());
+    }
+
+    Err(err)
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/mod.rs
new file mode 100644
index 0000000000..56dc014f58
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/mod.rs
@@ -0,0 +1,57 @@
+pub mod api_token;
+pub mod context;
+pub mod endpoints;
+pub mod errors;
+pub mod result;
+
+use warp::{Filter, Rejection, Reply};
+
+const V0_REQUEST_TRACE_NAME: &str = "v0_request";
+
+pub async fn filter(
+    ctx: context::SharedContext,
+    enable_api_tokens: bool,
+) -> impl Filter<Extract = impl Reply, Error = Rejection> + Clone {
+    let api_root = warp::path!("api" / ..);
+
+    let v0_root = warp::path!("v0" / ..);
+    let service_version_root = warp::path!("vit-version" / ..);
+    // log request statistics
+    let log = warp::filters::trace::trace(|info| {
+        use http_zipkin::get_trace_context;
+        use tracing::field::Empty;
+        let span = tracing::span!(
+            tracing::Level::DEBUG,
+            "rest_api_request",
+            method = %info.method(),
+            path = info.path(),
+            version = ?info.version(),
+            remote_addr = Empty,
+            trace_id = Empty,
+            span_id = Empty,
+            parent_span_id = Empty,
+        );
+        if let Some(remote_addr) = info.remote_addr() {
+            span.record("remote_addr", remote_addr.to_string().as_str());
+        }
+        if let Some(trace_context) = get_trace_context(info.request_headers()) {
+            span.record("trace_id",
trace_context.trace_id().to_string().as_str()); + span.record("span_id", trace_context.span_id().to_string().as_str()); + if let Some(parent_span_id) = trace_context.parent_id() { + span.record("parent_span_id", parent_span_id.to_string().as_str()); + } + } + span + }); + + let v0 = endpoints::filter(v0_root.boxed(), ctx.clone(), enable_api_tokens).await; + + let service_version = + endpoints::service_version::filter(service_version_root.boxed(), ctx).await; + + api_root + .and(v0.or(service_version)) + .with(warp::trace::named(V0_REQUEST_TRACE_NAME)) + .recover(errors::handle_rejection) + .with(log) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/result.rs b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/result.rs new file mode 100644 index 0000000000..696a2f31a6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-lib-f10/src/v0/result.rs @@ -0,0 +1,15 @@ +use super::errors::HandleError; +use serde::Serialize; +use warp::reply::Response; +use warp::Reply; + +pub struct HandlerResult(pub Result); + +impl Reply for HandlerResult { + fn into_response(self) -> Response { + match self.0 { + Ok(res) => warp::reply::json(&res).into_response(), + Err(error) => error.into_response(), + } + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-server-f10/Cargo.toml b/src/vit-servicing-station-f10/vit-servicing-station-server-f10/Cargo.toml new file mode 100644 index 0000000000..7a7b2193df --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-server-f10/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "vit-servicing-station-server-f10" +version = "0.5.0" +authors = ["danielsanchezq "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +log = "0.4.11" +structopt = "0.3.14" +tokio = { version = "^1.0", features = ["macros", "rt-multi-thread"] } +tracing = "0.1.21" +tracing-futures = "0.2.4" +tracing-subscriber = { version = "0.3", features=["fmt"] } +tracing-appender = "0.2" +vit-servicing-station-lib-f10 = { path = "../vit-servicing-station-lib-f10" } +#snapshot-service = { path = "../snapshot-service" } diff --git a/src/vit-servicing-station-f10/vit-servicing-station-server-f10/src/main.rs b/src/vit-servicing-station-f10/vit-servicing-station-server-f10/src/main.rs new file mode 100644 index 0000000000..9889669fa3 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-server-f10/src/main.rs @@ -0,0 +1,126 @@ +use std::path::{Path, PathBuf}; +use structopt::StructOpt; + +use tracing::{error, info}; +use tracing_appender::non_blocking::WorkerGuard; +use vit_servicing_station_lib_f10::{ + db, server, server::exit_codes::ApplicationExitCode, server::settings as server_settings, + server::settings::ServiceSettings, v0, +}; + +fn check_and_build_proper_path(path: &Path) -> std::io::Result<()> { + use std::fs; + // create parent dirs if not exists + fs::create_dir_all(path.parent().ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::InvalidData, + format!("Cannot create path tree {}", path.to_str().unwrap()), + ) + })?)?; + fs::OpenOptions::new().create(true).write(true).open(path)?; + Ok(()) +} + +fn config_tracing( + level: server_settings::LogLevel, + pathbuf: Option, +) -> Result { + if let Some(path) = pathbuf { + // check path integrity + // we try opening the file since tracing appender would just panic instead of + // returning an error + check_and_build_proper_path(&path)?; + + let 
file_appender = tracing_appender::rolling::never( + path.parent().ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::InvalidInput, + format!("Log file path `{}` is invalid.", path.display()), + ) + })?, + path.file_name().ok_or_else(|| { + std::io::Error::new( + std::io::ErrorKind::InvalidInput, + format!( + "Log file path `{}` doesn't contain a valid file name.", + path.display() + ), + ) + })?, + ); + + let (non_blocking, guard) = tracing_appender::non_blocking(file_appender); + tracing_subscriber::fmt() + .with_writer(non_blocking) + .with_max_level(level) + .init(); + Ok(guard) + } else { + let (non_blocking, guard) = tracing_appender::non_blocking(std::io::stdout()); + tracing_subscriber::fmt() + .with_writer(non_blocking) + .with_max_level(level) + .init(); + Ok(guard) + } +} + +#[tokio::main] +async fn main() { + // load settings from command line (defaults to env variables) + let mut settings: ServiceSettings = ServiceSettings::from_args(); + + // load settings from file if specified + if let Some(settings_file) = &settings.in_settings_file { + let in_file_settings = server_settings::load_settings_from_file(settings_file) + .unwrap_or_else(|e| { + error!("Error loading settings from file {}, {}", settings_file, e); + std::process::exit(ApplicationExitCode::LoadSettingsError.into()) + }); + // merge input file settings override by cli arguments + settings = in_file_settings.override_from(&settings); + } + + // dump settings and exit if specified + if let Some(settings_file) = &settings.out_settings_file { + server_settings::dump_settings_to_file(settings_file, &settings).unwrap_or_else(|e| { + error!("Error writing settings to file {}: {}", settings_file, e); + std::process::exit(ApplicationExitCode::WriteSettingsError.into()) + }); + std::process::exit(0); + } + + // setup logging + let _guard = config_tracing( + settings.log.log_level.unwrap_or_default(), + settings.log.log_output_path.clone(), + ) + .unwrap_or_else(|e| { + error!("Error setting up logging: {}", e); + std::process::exit(ApplicationExitCode::LoadSettingsError.into()) + }); + + // Check db file exists (should be here only for current sqlite db backend) + if !std::path::Path::new(&settings.db_url).exists() { + error!("DB file {} not found.", &settings.db_url); + std::process::exit(ApplicationExitCode::DbConnectionError.into()) + } + // load db pool + let db_pool = db::load_db_connection_pool(&settings.db_url).unwrap_or_else(|e| { + error!("Error connecting to database: {}", e); + std::process::exit(ApplicationExitCode::DbConnectionError.into()) + }); + + let context = + v0::context::new_shared_context(db_pool, &settings.block0_path, &settings.service_version); + + let app = v0::filter(context, settings.enable_api_tokens).await; + + info!( + "Running server at {}, database located at {}", + settings.address, settings.db_url + ); + + // run server with settings + server::start_server(app, Some(settings)).await +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/Cargo.toml b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/Cargo.toml new file mode 100644 index 0000000000..11e3e3a125 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/Cargo.toml @@ -0,0 +1,53 @@ +[package] +name = "vit-servicing-station-tests-f10" +version = "0.5.0" +authors = ["dkijania "] +edition = "2018" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +base64 = "0.12.1" +cfg-if = "0.1" +time = { version 
= "0.3", features = ["formatting", "parsing", "macros"] } +diesel = { version = "1.4.4", features = ["sqlite", "r2d2"] } +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0.53" +structopt = "0.3.14" +thiserror = "1.0" +tokio = { version = "1.0", features = ["macros", "sync", "fs"] } +diesel_migrations = "1.4.0" +tempfile = "3" +rand_core = "0.5" +assert_fs = "1.0.0" +assert_cmd = "2" +hyper = "0.14.2" +vit-servicing-station-lib-f10 = {path= "../vit-servicing-station-lib-f10"} +lazy_static = "1.4" +rand = "0.7" +url = "2.2" +quickcheck = { version = "0.9" } +quickcheck_macros = { version = "0.9" } +predicates = { version = "2.0", default-features = false, features = ["diff"] } +jortestkit = { path = "../../jortestkit" } +chain-impl-mockchain = { workspace = true, features = [ "property-test-api" ] } +chain-addr = { path = "../../chain-libs/chain-addr", features = [ "property-test-api" ] } +chain-crypto = { path = "../../chain-libs/chain-crypto", features = [ "property-test-api" ] } +fake = { version = "2.2", features=['http']} +pretty_assertions = "0.6" +dyn-clone = "1.0.4" +itertools = "0.10.3" + +reqwest = { workspace = true } + +# This solves building on windows when sqlite3lib is not installed or missing in the `$PATH` +# as it happens with the github actions pipeline associated to this project. +[target.'cfg(windows)'.dependencies] +libsqlite3-sys = { version = "0.9.3", features = ["bundled"] } + +[features] +test-api = [] +# feature for excluding performance tests from standard cargo test run +non-functional = [] +# sub category of non-functional tests +soak = [] diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/build.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/build.rs new file mode 100644 index 0000000000..990e91ed81 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/build.rs @@ -0,0 +1,9 @@ +fn main() { + let vit_bin_name = option_env!("VIT_BIN_NAME").unwrap_or("vit-servicing-station-server"); + println!("cargo:rustc-env=VIT_BIN_NAME={}", vit_bin_name); + + let vit_cli_name = option_env!("VIT_CLI_NAME").unwrap_or("vit-servicing-station-cli"); + println!("cargo:rustc-env=VIT_CLI_NAME={}", vit_cli_name); + + println!("cargo:rustc-env=RUST_BACKTRACE=full"); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/load.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/load.rs new file mode 100644 index 0000000000..16d52551ad --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/load.rs @@ -0,0 +1,50 @@ +use std::path::Path; +use std::process::Command; +pub struct LoadCsvCommand { + command: Command, +} + +impl LoadCsvCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn db_url>(mut self, db_url: P) -> Self { + self.command.arg("--db-url").arg(db_url.as_ref()); + self + } + + pub fn funds>(mut self, funds: P) -> Self { + self.command.arg("--funds").arg(funds.as_ref()); + self + } + + pub fn proposals>(mut self, proposals: P) -> Self { + self.command.arg("--proposals").arg(proposals.as_ref()); + self + } + + pub fn voteplans>(mut self, voteplans: P) -> Self { + self.command.arg("--voteplans").arg(voteplans.as_ref()); + self + } + + pub fn challenges>(mut self, challenges: P) -> Self { + self.command.arg("--challenges").arg(challenges.as_ref()); + self + } + + pub fn advisor_reviews>(mut self, 
reviews: P) -> Self { + self.command.arg("--reviews").arg(reviews.as_ref()); + self + } + + pub fn goals>(mut self, goals: P) -> Self { + self.command.arg("--goals").arg(goals.as_ref()); + self + } + + pub fn build(self) -> Command { + self.command + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/mod.rs new file mode 100644 index 0000000000..78622e494e --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/csv_data/mod.rs @@ -0,0 +1,19 @@ +mod load; + +pub use load::LoadCsvCommand; +use std::process::Command; + +pub struct CsvDataCommand { + command: Command, +} + +impl CsvDataCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn load(mut self) -> LoadCsvCommand { + self.command.arg("load"); + LoadCsvCommand::new(self.command) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/init.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/init.rs new file mode 100644 index 0000000000..eaf600655d --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/init.rs @@ -0,0 +1,20 @@ +use std::path::Path; +use std::process::Command; +pub struct InitDbCommand { + command: Command, +} + +impl InitDbCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn db_url>(mut self, db_url: P) -> Self { + self.command.arg("--db-url").arg(db_url.as_ref()); + self + } + + pub fn build(self) -> Command { + self.command + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/mod.rs new file mode 100644 index 0000000000..5bd7d1f5a6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/db/mod.rs @@ -0,0 +1,19 @@ +mod init; + +pub use init::InitDbCommand; +use std::process::Command; + +pub struct DbCommand { + command: Command, +} + +impl DbCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn init(mut self) -> InitDbCommand { + self.command.arg("init"); + InitDbCommand::new(self.command) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/mod.rs new file mode 100644 index 0000000000..25ca791979 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/mod.rs @@ -0,0 +1,44 @@ +mod csv_data; +mod db; +mod token; + +use csv_data::CsvDataCommand; +use db::DbCommand; +use token::ApiTokenCommand; + +use crate::common::startup::get_cli_exe; +use std::{path::PathBuf, process::Command}; + +pub struct VitCliCommand { + exe: PathBuf, +} + +impl Default for VitCliCommand { + fn default() -> Self { + Self::new(get_cli_exe()) + } +} + +impl VitCliCommand { + pub fn new(exe: PathBuf) -> Self { + Self { exe } + } + + pub fn api_token(self) -> ApiTokenCommand { + let mut command = Command::new(self.exe); + command.arg("api-token"); + ApiTokenCommand::new(command) + } + + pub fn db(self) -> DbCommand { + let mut command = Command::new(self.exe); + command.arg("db"); + DbCommand::new(command) + 
} + + pub fn csv_data(self) -> CsvDataCommand { + let mut command = Command::new(self.exe); + command.arg("csv-data"); + CsvDataCommand::new(command) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/add.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/add.rs new file mode 100644 index 0000000000..fbda95a5e8 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/add.rs @@ -0,0 +1,30 @@ +use std::process::Command; + +pub struct ApiTokenAddCommand { + command: Command, +} + +impl ApiTokenAddCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn db_url>(mut self, db_url: S) -> Self { + self.command.arg("--db-url").arg(db_url.into()); + self + } + + pub fn tokens(mut self, tokens: &[String]) -> Self { + self = self.tokens_as_str(&tokens.join(",")); + self + } + + pub fn tokens_as_str(mut self, tokens: &str) -> Self { + self.command.arg("--tokens").arg(tokens); + self + } + + pub fn build(self) -> Command { + self.command + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/generate.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/generate.rs new file mode 100644 index 0000000000..bc45ede99a --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/generate.rs @@ -0,0 +1,25 @@ +use std::process::Command; + +pub struct ApiTokenGenerateCommand { + command: Command, +} + +impl ApiTokenGenerateCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn n(mut self, n: u32) -> Self { + self.command.arg("--n").arg(n.to_string()); + self + } + + pub fn size(mut self, size: u32) -> Self { + self.command.arg("--size").arg(size.to_string()); + self + } + + pub fn build(self) -> Command { + self.command + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/mod.rs new file mode 100644 index 0000000000..343c96f8e2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/commands/token/mod.rs @@ -0,0 +1,26 @@ +mod add; +mod generate; + +pub use add::ApiTokenAddCommand; +pub use generate::ApiTokenGenerateCommand; +use std::process::Command; + +pub struct ApiTokenCommand { + command: Command, +} + +impl ApiTokenCommand { + pub fn new(command: Command) -> Self { + Self { command } + } + + pub fn generate(mut self) -> ApiTokenGenerateCommand { + self.command.arg("generate"); + ApiTokenGenerateCommand::new(self.command) + } + + pub fn add(mut self) -> ApiTokenAddCommand { + self.command.arg("add"); + ApiTokenAddCommand::new(self.command) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/mod.rs new file mode 100644 index 0000000000..b44e8146d5 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/cli/mod.rs @@ -0,0 +1,33 @@ +use super::startup::get_cli_exe; +use assert_cmd::assert::OutputAssertExt; +mod commands; +pub use commands::VitCliCommand; +use jortestkit::process::output_extensions::ProcessOutput; +use std::path::PathBuf; +pub struct VitCli { + exe: PathBuf, +} + 
+impl Default for VitCli { + fn default() -> Self { + Self::new(get_cli_exe()) + } +} + +impl VitCli { + pub fn new(exe: PathBuf) -> Self { + Self { exe } + } + + pub fn generate_tokens(&self, n: u32) -> Vec { + let vit_command: VitCliCommand = VitCliCommand::new(self.exe.clone()); + vit_command + .api_token() + .generate() + .n(n) + .build() + .assert() + .get_output() + .as_multi_line() + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/mod.rs new file mode 100644 index 0000000000..c708334460 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/mod.rs @@ -0,0 +1,3 @@ +pub mod rest; + +pub use rest::{Error as RestError, RawRestClient, RestClient}; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/logger.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/logger.rs new file mode 100644 index 0000000000..b5fd2aa4d2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/logger.rs @@ -0,0 +1,48 @@ +#[derive(Debug, Clone)] +pub struct RestClientLogger { + enabled: bool, +} + +impl Default for RestClientLogger { + fn default() -> Self { + Self { enabled: true } + } +} + +impl RestClientLogger { + pub fn log_request(&self, request: &str) { + if !self.is_enabled() { + return; + } + println!("Request: {:#?}", request); + } + + pub fn log_response(&self, response: &reqwest::blocking::Response) { + if !self.is_enabled() { + return; + } + println!("Response: {:#?}", response); + } + + pub fn log_text(&self, content: &str) { + if !self.is_enabled() { + return; + } + println!("Text: {:#?}", content); + } + + pub fn log_post_body(&self, content: &str) { + if !self.is_enabled() { + return; + } + println!("Post Body: {}", content); + } + + pub fn is_enabled(&self) -> bool { + self.enabled + } + + pub fn set_enabled(&mut self, enabled: bool) { + self.enabled = enabled + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/mod.rs new file mode 100644 index 0000000000..58c2381d94 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/mod.rs @@ -0,0 +1,198 @@ +mod logger; +mod path; +mod raw; + +use crate::common::clients::rest::path::RestPathBuilder; +use hyper::StatusCode; +use logger::RestClientLogger; +pub use raw::RestClient as RawRestClient; +use reqwest::blocking::Response; +use std::collections::HashMap; +use thiserror::Error; +use url::Url; +use vit_servicing_station_lib_f10::db::models::challenges::Challenge; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::proposals::FullProposalInfo; +use vit_servicing_station_lib_f10::server::settings::ServiceSettings; +use vit_servicing_station_lib_f10::{ + db::models::{funds::Fund, proposals::Proposal}, + v0::endpoints::{proposals::ProposalVoteplanIdAndIndexes, service_version::ServiceVersion}, +}; + +#[derive(Debug, Clone)] +pub struct RestClient { + raw: RawRestClient, +} + +impl From<&ServiceSettings> for RestClient { + fn from(settings: &ServiceSettings) -> Self { + let url = { + let scheme = { + if settings.tls.cert_file.is_some() { + "https" + } else { 
+ "http" + } + }; + //we accepted ServiceSettings struct in constructor, so address should be proper + //SockerAddr struct, therefore we won't have any problems with parsing result + format!("{}://{}", scheme, settings.address) + .parse() + .unwrap() + }; + Self::new(url) + } +} + +#[allow(clippy::from_over_into)] +impl Into for RestClient { + fn into(self) -> RawRestClient { + self.raw + } +} + +impl RestClient { + pub fn new(url: Url) -> Self { + Self { + raw: RawRestClient::new(url), + } + } + + pub fn health(&self) -> Result<(), Error> { + self.verify_status_code(&self.raw.health()?) + .map_err(|_| Error::ServerIsNotUp) + } + + pub fn funds(&self) -> Result { + let response = self.raw.funds()?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(|e| Error::CannotDeserializeResponse { + source: e, + text: content.clone(), + }) + } + + pub fn path_builder(&self) -> &RestPathBuilder { + self.raw.path_builder() + } + + pub fn proposal(&self, id: &str) -> Result { + let response = self.raw.proposal(id)?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(|e| Error::CannotDeserializeResponse { + source: e, + text: content.clone(), + }) + } + + pub fn proposals(&self) -> Result, Error> { + let response = self.raw.proposals()?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + if content.is_empty() { + return Ok(vec![]); + } + serde_json::from_str(&content).map_err(|e| Error::CannotDeserializeResponse { + source: e, + text: content.clone(), + }) + } + + pub fn fund(&self, id: &str) -> Result { + let response = self.raw.fund(id)?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(Error::CannotDeserialize) + } + + pub fn proposals_by_voteplan_id_and_index( + &self, + request: &[ProposalVoteplanIdAndIndexes], + ) -> Result, Error> { + let request_as_string = serde_json::to_string(&request)?; + serde_json::from_str( + &self + .raw + .proposals_by_voteplan_id_and_index(&request_as_string)? 
+ .text()?, + ) + .map_err(Error::CannotDeserialize) + } + + pub fn challenges(&self) -> Result<Vec<Challenge>, Error> { + let response = self.raw.challenges()?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(Error::CannotDeserialize) + } + + pub fn genesis(&self) -> Result<Vec<u8>, Error> { + Ok(self.raw.genesis()?.bytes()?.to_vec()) + } + + pub fn service_version(&self) -> Result<ServiceVersion, Error> { + let response = self.raw.service_version()?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(Error::CannotDeserialize) + } + + pub fn advisor_reviews( + &self, + proposal_id: &str, + ) -> Result<HashMap<String, Vec<AdvisorReview>>, Error> { + let response = self.raw.advisor_reviews(proposal_id)?; + self.verify_status_code(&response)?; + let content = response.text()?; + self.raw.log_text(&content); + serde_json::from_str(&content).map_err(Error::CannotDeserialize) + } + + fn verify_status_code(&self, response: &Response) -> Result<(), Error> { + if !response.status().is_success() { + return Err(Error::ErrorStatusCode(response.status())); + } + Ok(()) + } + + pub fn disable_log(&mut self) { + self.raw.disable_log(); + } + + pub fn enable_log(&mut self) { + self.raw.enable_log(); + } + + pub fn set_api_token(&mut self, token: String) { + self.raw.set_api_token(token); + } + + pub fn set_origin<S: Into<String>>(&mut self, origin: S) { + self.raw.set_origin(origin); + } +} + +#[derive(Debug, Error)] +pub enum Error { + #[error("could not deserialize response {text}, due to: {source}")] + CannotDeserializeResponse { + source: serde_json::Error, + text: String, + }, + #[error("could not deserialize response")] + CannotDeserialize(#[from] serde_json::Error), + #[error("could not send request")] + RequestError(#[from] reqwest::Error), + #[error("server is not up")] + ServerIsNotUp, + #[error("Error code received: {0}")] + ErrorStatusCode(StatusCode), +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/path.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/path.rs new file mode 100644 index 0000000000..8e84eca10b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/path.rs @@ -0,0 +1,75 @@ +use url::Url; + +#[derive(Debug, Clone)] +pub struct RestPathBuilder { + address: Url, + root: String, +} + +impl RestPathBuilder { + pub fn new(address: Url) -> Self { + RestPathBuilder { + root: "api/v0/".to_string(), + address, + } + } + + pub fn admin(self) -> Self { + Self { + address: self.address, + root: self.root + "admin/", + } + } + + pub fn proposals(&self) -> String { + self.path("proposals") + } + + pub fn funds(&self) -> String { + self.path("fund") + } + + pub fn challenges(&self) -> String { + self.path("challenges") + } + + pub fn snapshot(&self, tag: &str) -> String { + self.path(&format!("snapshot/{}", tag)) + } + + pub fn snapshot_tags(&self) -> String { + self.path("snapshot") + } + + pub fn snapshot_voting_power(&self, tag: &str, key: &str) -> String { + self.path(&format!("snapshot/{}/{}", tag, key)) + } + + pub fn proposal(&self, id: &str) -> String { + self.path(&format!("proposals/{}", id)) + } + + pub fn fund(&self, id: &str) -> String { + self.path(&format!("fund/{}", id)) + } + + pub fn advisor_reviews(&self, id: &str) -> String { + self.path(&format!("reviews/{}", id)) + } + + pub fn genesis(&self) -> String { + self.path("block0") + } + +
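    // Editorial illustration of the paths this builder produces, assuming a
    // hypothetical base address of "http://127.0.0.1:8080/":
    //   proposals()            -> "http://127.0.0.1:8080/api/v0/proposals"
    //   fund("1")              -> "http://127.0.0.1:8080/api/v0/fund/1"
    //   admin().snapshot("t1") -> "http://127.0.0.1:8080/api/v0/admin/snapshot/t1"
    // The path() helper below simply concatenates the address, the "api/v0/" root
    // and the endpoint fragment.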
pub fn health(&self) -> String { + self.path("health") + } + + pub fn service_version(&self) -> String { + format!("{}{}{}", self.address, "api/", "vit-version") + } + + pub fn path(&self, path: &str) -> String { + format!("{}{}{}", self.address, self.root, path) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/raw.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/raw.rs new file mode 100644 index 0000000000..c2a8077f34 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/clients/rest/raw.rs @@ -0,0 +1,179 @@ +use reqwest::blocking::Response; + +use super::Error; +use super::{RestClientLogger, RestPathBuilder}; +use url::Url; +use vit_servicing_station_lib_f10::v0::api_token::API_TOKEN_HEADER; + +#[derive(Debug, Clone)] +pub struct RestClient { + path_builder: RestPathBuilder, + api_token: Option, + logger: RestClientLogger, + origin: Option, +} + +const ORIGIN: &str = "Origin"; + +impl RestClient { + pub fn new(url: Url) -> Self { + Self { + api_token: None, + path_builder: RestPathBuilder::new(url), + logger: RestClientLogger::default(), + origin: None, + } + } + + pub fn health(&self) -> Result { + self.get(&self.path_builder.health()) + .map_err(Error::RequestError) + } + + pub fn funds(&self) -> Result { + self.get(&self.path_builder.funds()) + .map_err(Error::RequestError) + } + + pub fn proposals(&self) -> Result { + self.get(&self.path_builder.proposals()) + .map_err(Error::RequestError) + } + + pub fn put_snapshot(&self, tag: &str, content: String) -> Result { + self.put(&self.path_builder.clone().admin().snapshot(tag), content) + .map_err(Error::RequestError) + } + + pub fn snapshot_tags(&self) -> Result { + self.get(&self.path_builder.snapshot_tags()) + .map_err(Error::RequestError) + } + + pub fn voting_power(&self, tag: &str, key: &str) -> Result { + self.get(&self.path_builder.snapshot_voting_power(tag, key)) + .map_err(Error::RequestError) + } + + pub fn proposal(&self, id: &str) -> Result { + self.get(&self.path_builder().proposal(id)) + .map_err(Error::RequestError) + } + + pub fn proposals_by_voteplan_id_and_index( + &self, + request_as_string: &str, + ) -> Result { + self.post( + &self.path_builder().proposals(), + request_as_string.to_string(), + ) + .map_err(Error::RequestError) + } + + pub fn fund(&self, id: &str) -> Result { + self.get(&self.path_builder().fund(id)) + .map_err(Error::RequestError) + } + + pub fn challenges(&self) -> Result { + self.get(&self.path_builder().challenges()) + .map_err(Error::RequestError) + } + + pub fn genesis(&self) -> Result { + self.get(&self.path_builder.genesis()) + .map_err(Error::RequestError) + } + + pub fn service_version(&self) -> Result { + self.get(&self.path_builder.service_version()) + .map_err(Error::RequestError) + } + + pub fn advisor_reviews(&self, proposal_id: &str) -> Result { + self.get(&self.path_builder.advisor_reviews(proposal_id)) + .map_err(Error::RequestError) + } + + pub fn client(&self) -> Result { + reqwest::blocking::Client::builder() + .danger_accept_invalid_certs(true) + .build() + .map_err(Into::into) + } + + pub fn set_api_token(&mut self, token: String) { + self.api_token = Some(token); + } + + pub fn set_origin>(&mut self, origin: S) { + self.origin = Some(origin.into()); + } + + pub fn disable_log(&mut self) { + self.logger.set_enabled(false); + } + + pub fn enable_log(&mut self) { + self.logger.set_enabled(true); + } + + pub fn log_response(&self, response: 
&Response) { + self.logger.log_response(response); + } + + pub fn log_text(&self, content: &str) { + self.logger.log_text(content); + } + + pub fn path_builder(&self) -> &RestPathBuilder { + &self.path_builder + } + + fn post( + &self, + path: &str, + data: String, + ) -> Result { + self.logger.log_post_body(&data); + + let mut res = self.client()?.post(path).body(String::into_bytes(data)); + + if let Some(api_token) = &self.api_token { + res = res.header(API_TOKEN_HEADER, api_token.to_string()); + } + let response = res.send()?; + Ok(response) + } + + fn get(&self, path: &str) -> Result { + self.logger.log_request(path); + let mut res = self.client()?.get(path); + + if let Some(api_token) = &self.api_token { + res = res.header(API_TOKEN_HEADER, api_token.to_string()); + } + if let Some(origin) = &self.origin { + res = res.header(ORIGIN, origin.to_string()); + } + let response = res.send()?; + self.logger.log_response(&response); + Ok(response) + } + + fn put(&self, path: &str, body: String) -> Result { + self.logger.log_request(path); + let mut res = self.client()?.put(path).body(body); + + if let Some(api_token) = &self.api_token { + res = res.header(API_TOKEN_HEADER, api_token.to_string()); + } + if let Some(origin) = &self.origin { + res = res.header(ORIGIN, origin.to_string()); + } + let response = res.send()?; + self.logger.log_response(&response); + Ok(response) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/csv_converter.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/csv_converter.rs new file mode 100644 index 0000000000..8e1cfe471b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/csv_converter.rs @@ -0,0 +1,331 @@ +use jortestkit::csv::CsvFileBuilder; +use std::path::Path; +use thiserror::Error; +use time::format_description::well_known::Rfc3339; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::goals::InsertGoal; +use vit_servicing_station_lib_f10::db::models::proposals::{ + FullProposalInfo, ProposalChallengeInfo, +}; +use vit_servicing_station_lib_f10::{ + db::models::{challenges::Challenge, funds::Fund, voteplans::Voteplan}, + utils::datetime::unix_timestamp_to_datetime, +}; + +#[derive(Debug, Error)] +pub enum Error { + #[error("Cannot format csv file with funds due to : {0}")] + CannotBuildCsvWithFunds(String), +} + +pub struct CsvConverter; + +impl CsvConverter { + pub fn funds>(&self, funds: Vec, path: P) -> Result<(), Error> { + let headers = vec![ + "id", + "fund_name", + "voting_power_threshold", + "fund_goal", + "fund_start_time", + "fund_end_time", + "next_fund_start_time", + "registration_snapshot_time", + "next_registration_snapshot_time", + "insight_sharing_start", + "proposal_submission_start", + "refine_proposals_start", + "finalize_proposals_start", + "proposal_assessment_start", + "assessment_qa_start", + "snapshot_start", + "voting_start", + "voting_end", + "tallying_end", + "results_url", + "survey_url", + ]; + let content: Vec> = funds.iter().map(convert_fund).collect(); + self.build_file(headers, content, path) + } + + pub fn voteplans>( + &self, + voteplans: Vec, + path: P, + ) -> Result<(), Error> { + let headers = vec![ + "id", + "chain_voteplan_id", + "chain_vote_start_time", + "chain_vote_end_time", + "chain_committee_end_time", + "chain_voteplan_payload", + "chain_vote_encryption_key", + "fund_id", + ]; + let content: Vec> = 
voteplans.iter().map(convert_voteplan).collect(); + self.build_file(headers, content, path) + } + + pub fn proposals>( + &self, + proposals: Vec, + path: P, + ) -> Result<(), Error> { + let headers = vec![ + "internal_id", + "category_name", + "proposal_id", + "proposal_title", + "proposal_summary", + "proposal_url", + "proposal_files_url", + "proposal_public_key", + "proposal_funds", + "proposal_impact_score", + "proposer_email", + "proposer_name", + "proposer_url", + "proposer_relevant_experience", + "chain_proposal_id", + "chain_proposal_index", + "chain_vote_options", + "chain_vote_type", + "chain_vote_action", + "id", + "chain_voteplan_id", + "chain_vote_start_time", + "chain_vote_end_time", + "chain_committe", + "challenge_id", + "proposal_solution", + "proposal_brief", + "proposal_importance", + "proposal_goal", + "proposal_metrics", + ]; + + let content: Vec> = proposals.iter().map(convert_proposal).collect(); + self.build_file(headers, content, path) + } + + pub fn challenges>( + &self, + challenges: Vec, + path: P, + ) -> Result<(), Error> { + let headers = vec![ + "id", + "challenge_type", + "title", + "description", + "rewards_total", + "proposers_rewards", + "fund_id", + "challenge_url", + ]; + + let content: Vec> = challenges.iter().map(convert_challenge).collect(); + self.build_file(headers, content, path) + } + + pub fn advisor_reviews>( + &self, + challenges: Vec, + path: P, + ) -> Result<(), Error> { + let headers = vec![ + "id", + "proposal_id", + "assessor", + "impact_alignment_rating_given", + "impact_alignment_note", + "feasibility_rating_given", + "feasibility_note", + "auditability_rating_given", + "auditability_note", + "excellent", + "good", + ]; + + let content: Vec> = challenges.iter().map(convert_advisor_review).collect(); + self.build_file(headers, content, path) + } + + pub fn goals>(&self, goals: Vec, path: P) -> Result<(), Error> { + let headers = vec!["goal_name", "fund_id"]; + + let content: Vec> = goals.iter().map(convert_goals).collect(); + self.build_file(headers, content, path) + } + + fn build_file>( + &self, + headers: Vec<&str>, + content: Vec>, + path: P, + ) -> Result<(), Error> { + let mut csv_loader: CsvFileBuilder = CsvFileBuilder::from_path(path); + csv_loader + .with_header(headers) + .with_contents(content) + .build() + .map_err(|e| Error::CannotBuildCsvWithFunds(e.to_string())) + } +} + +fn convert_proposal(proposal: &FullProposalInfo) -> Vec { + let (solution, brief, importance, goal, metrics) = match &proposal.challenge_info { + ProposalChallengeInfo::Simple(data) => ( + data.proposal_solution.clone(), + "".to_string(), + "".to_string(), + "".to_string(), + "".to_string(), + ), + ProposalChallengeInfo::CommunityChoice(data) => ( + "".to_string(), + data.proposal_brief.clone(), + data.proposal_importance.clone(), + data.proposal_goal.clone(), + data.proposal_metrics.clone(), + ), + }; + let proposal = &proposal.proposal; + + vec![ + proposal.internal_id.to_string(), + proposal.proposal_category.category_name.to_string(), + proposal.proposal_id.to_string(), + proposal.proposal_title.to_string(), + proposal.proposal_summary.to_string(), + proposal.proposal_url.to_string(), + proposal.proposal_files_url.to_string(), + proposal.proposal_public_key.to_string(), + proposal.proposal_funds.to_string(), + proposal.proposal_impact_score.to_string(), + proposal.proposer.proposer_email.to_string(), + proposal.proposer.proposer_name.to_string(), + proposal.proposer.proposer_url.to_string(), + proposal.proposer.proposer_relevant_experience.to_string(), 
+ std::str::from_utf8(&proposal.chain_proposal_id) + .unwrap() + .to_string(), + proposal.chain_proposal_index.to_string(), + proposal.chain_vote_options.as_csv_string(), + proposal.chain_voteplan_payload.to_string(), + "off_chain".to_string(), + proposal.proposal_id.to_string(), + proposal.chain_voteplan_id.to_string(), + unix_timestamp_to_rfc3339(proposal.chain_vote_start_time), + unix_timestamp_to_rfc3339(proposal.chain_vote_end_time), + unix_timestamp_to_rfc3339(proposal.chain_committee_end_time), + proposal.challenge_id.to_string(), + solution, + brief, + importance, + goal, + metrics, + ] +} + +fn convert_fund(fund: &Fund) -> Vec { + // destructure the object to get a compile-time exhaustivity check, even if we already have + // tests for this, it's easier to keep it up-to-date + let Fund { + id, + fund_name, + fund_goal, + voting_power_threshold, + fund_start_time, + fund_end_time, + next_fund_start_time, + registration_snapshot_time, + next_registration_snapshot_time, + chain_vote_plans: _, + challenges: _, + stage_dates, + goals: _, + results_url, + survey_url, + } = fund; + + // TODO: can we leverage serde to build these vectors? + vec![ + id.to_string(), + fund_name.to_string(), + voting_power_threshold.to_string(), + fund_goal.to_string(), + unix_timestamp_to_rfc3339(*fund_start_time), + unix_timestamp_to_rfc3339(*fund_end_time), + unix_timestamp_to_rfc3339(*next_fund_start_time), + unix_timestamp_to_rfc3339(*registration_snapshot_time), + unix_timestamp_to_rfc3339(*next_registration_snapshot_time), + unix_timestamp_to_rfc3339(stage_dates.insight_sharing_start), + unix_timestamp_to_rfc3339(stage_dates.proposal_submission_start), + unix_timestamp_to_rfc3339(stage_dates.refine_proposals_start), + unix_timestamp_to_rfc3339(stage_dates.finalize_proposals_start), + unix_timestamp_to_rfc3339(stage_dates.proposal_assessment_start), + unix_timestamp_to_rfc3339(stage_dates.assessment_qa_start), + unix_timestamp_to_rfc3339(stage_dates.snapshot_start), + unix_timestamp_to_rfc3339(stage_dates.voting_start), + unix_timestamp_to_rfc3339(stage_dates.voting_end), + unix_timestamp_to_rfc3339(stage_dates.tallying_end), + results_url.to_string(), + survey_url.to_string(), + ] +} + +fn convert_voteplan(voteplan: &Voteplan) -> Vec { + vec![ + voteplan.id.to_string(), + voteplan.chain_voteplan_id.to_string(), + unix_timestamp_to_rfc3339(voteplan.chain_vote_start_time), + unix_timestamp_to_rfc3339(voteplan.chain_vote_end_time), + unix_timestamp_to_rfc3339(voteplan.chain_committee_end_time), + voteplan.chain_voteplan_payload.to_string(), + voteplan.chain_vote_encryption_key.to_string(), + voteplan.fund_id.to_string(), + ] +} + +fn convert_challenge(challenge: &Challenge) -> Vec { + vec![ + challenge.id.to_string(), + challenge.challenge_type.to_string(), + challenge.title.clone(), + challenge.description.clone(), + challenge.rewards_total.to_string(), + challenge.proposers_rewards.to_string(), + challenge.fund_id.to_string(), + challenge.challenge_url.clone(), + ] +} + +fn convert_advisor_review(review: &AdvisorReview) -> Vec { + vec![ + review.id.to_string(), + review.proposal_id.to_string(), + review.assessor.to_string(), + review.impact_alignment_rating_given.to_string(), + review.impact_alignment_note.to_string(), + review.feasibility_rating_given.to_string(), + review.feasibility_note.to_string(), + review.auditability_rating_given.to_string(), + review.auditability_note.to_string(), + (review.ranking as u8 == 0).to_string(), + (review.ranking as u8 == 1).to_string(), + ] +} + +fn 
convert_goals(goal: &InsertGoal) -> Vec { + let InsertGoal { goal_name, fund_id } = goal; + vec![goal_name.to_string(), fund_id.to_string()] +} + +fn unix_timestamp_to_rfc3339(timestamp: i64) -> String { + unix_timestamp_to_datetime(timestamp) + .format(&Rfc3339) + .unwrap() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/generator.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/generator.rs new file mode 100644 index 0000000000..ea3f519501 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/generator.rs @@ -0,0 +1,116 @@ +use crate::common::data::{CurrentFund, ValidVotePlanParameters}; +use chain_impl_mockchain::testing::scenario::template::ProposalDefBuilder; +use chain_impl_mockchain::testing::scenario::template::VotePlanDef; +use chain_impl_mockchain::testing::scenario::template::VotePlanDefBuilder; +use fake::faker::name::en::Name; +use fake::Fake; +use rand::{rngs::OsRng, RngCore}; +use std::{collections::HashMap, iter}; +use time::{Duration, OffsetDateTime}; +use vit_servicing_station_lib_f10::{ + db::models::api_tokens::ApiTokenData, v0::api_token::ApiToken, +}; + +#[derive(Clone)] +pub struct ArbitraryGenerator { + id_generator: OsRng, +} + +impl Default for ArbitraryGenerator { + fn default() -> Self { + ArbitraryGenerator::new() + } +} + +impl ArbitraryGenerator { + pub fn new() -> Self { + Self { + id_generator: OsRng, + } + } + + pub fn random_index(&mut self, limit: usize) -> usize { + (self.id_generator.next_u32() as usize) % limit + } + + pub fn random_size(&mut self) -> usize { + (self.id_generator.next_u32() as usize) % 100 + 1 + } + + pub fn bytes(&mut self) -> [u8; 32] { + let mut random_bytes: [u8; 32] = [0; 32]; + self.id_generator.fill_bytes(&mut random_bytes); + random_bytes + } + + pub fn next_u32(&mut self) -> u32 { + self.id_generator.next_u32() + } + + pub fn next_u64(&mut self) -> u64 { + self.id_generator.next_u64() + } + + pub fn token_hash(&mut self) -> String { + base64::encode_config(self.bytes(), base64::URL_SAFE_NO_PAD) + } + + pub fn id(&mut self) -> i32 { + self.id_generator.next_u32() as i32 + } + + pub fn token(&mut self) -> (String, ApiTokenData) { + let data = self.bytes().to_vec(); + let token_creation_time = OffsetDateTime::now_utc() - Duration::days(1); + let toket_expiry_time = OffsetDateTime::now_utc() + Duration::days(1); + + let token_data = ApiTokenData { + token: ApiToken::new(data.clone()), + creation_time: token_creation_time.unix_timestamp(), + expire_time: toket_expiry_time.unix_timestamp(), + }; + ( + base64::encode_config(data, base64::URL_SAFE_NO_PAD), + token_data, + ) + } + + pub fn tokens(&mut self) -> HashMap { + let size = self.random_size() % 10 + 2; + iter::from_fn(|| Some(self.token())).take(size).collect() + } + + pub fn hash(&mut self) -> String { + let mut hash = [0u8; 32]; + self.id_generator.fill_bytes(&mut hash); + base64::encode(hash) + } + + pub fn vote_plan_def(&mut self) -> VotePlanDef { + let mut vote_plan_builder = VotePlanDefBuilder::new("fund_x"); + vote_plan_builder.owner(&Name().fake::()); + vote_plan_builder.vote_phases(1, 2, 3); + + for _ in 0..(self.next_u32() % 245 + 10) { + let mut proposal_builder = ProposalDefBuilder::new( + chain_impl_mockchain::testing::VoteTestGen::external_proposal_id(), + ); + proposal_builder.options(3); + proposal_builder.action_off_chain(); + vote_plan_builder.with_proposal(&mut 
proposal_builder); + } + + vote_plan_builder.build() + } + + pub fn vote_plan_def_collection(&mut self) -> Vec { + let len = (self.next_u32() % 10 + 1) as usize; + std::iter::from_fn(|| Some(self.vote_plan_def())) + .take(len) + .collect() + } + + pub fn valid_vote_plan_parameters(&mut self) -> ValidVotePlanParameters { + CurrentFund::new(self.vote_plan_def_collection(), Default::default()).into() + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/mod.rs new file mode 100644 index 0000000000..e76fc8fddd --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/mod.rs @@ -0,0 +1,5 @@ +mod generator; +mod snapshot_generator; + +pub use generator::ArbitraryGenerator; +pub use snapshot_generator::ArbitrarySnapshotGenerator; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/snapshot_generator.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/snapshot_generator.rs new file mode 100644 index 0000000000..c613fb7802 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/arbitrary/snapshot_generator.rs @@ -0,0 +1,370 @@ +use crate::common::data::ArbitraryGenerator; +use crate::common::data::ArbitraryValidVotingTemplateGenerator; +use crate::common::data::{Snapshot, ValidVotingTemplateGenerator}; +use std::iter; +use time::{Duration, OffsetDateTime}; +use vit_servicing_station_lib_f10::db::models::funds::FundStageDates; +use vit_servicing_station_lib_f10::db::models::goals::Goal; +use vit_servicing_station_lib_f10::db::models::{ + api_tokens::ApiTokenData, + challenges::Challenge, + funds::Fund, + proposals::{ChallengeType, Proposal}, + voteplans::Voteplan, +}; + +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::proposals::FullProposalInfo; + +struct FundDateTimes { + start: OffsetDateTime, + end: OffsetDateTime, + next: OffsetDateTime, + snapshot: OffsetDateTime, + next_snapshot: OffsetDateTime, + insight_sharing_start: OffsetDateTime, + proposal_submission_start: OffsetDateTime, + refine_proposals_start: OffsetDateTime, + finalize_proposals_start: OffsetDateTime, + proposal_assessment_start: OffsetDateTime, + assessment_qa_start: OffsetDateTime, + snapshot_start: OffsetDateTime, + voting_start: OffsetDateTime, + voting_end: OffsetDateTime, + tallying_end: OffsetDateTime, +} + +struct VoteplanDateTimes { + start: OffsetDateTime, + end: OffsetDateTime, + tally: OffsetDateTime, +} + +#[derive(Clone)] +pub struct ArbitrarySnapshotGenerator { + id_generator: ArbitraryGenerator, + template_generator: ArbitraryValidVotingTemplateGenerator, +} + +impl Default for ArbitrarySnapshotGenerator { + fn default() -> Self { + Self { + id_generator: ArbitraryGenerator::new(), + template_generator: ArbitraryValidVotingTemplateGenerator::new(), + } + } +} + +impl ArbitrarySnapshotGenerator { + pub fn funds(&mut self) -> Vec { + let size = self.id_generator.random_size(); + iter::from_fn(|| Some(self.gen_single_fund())) + .take(size) + .collect() + } + + fn gen_single_fund(&mut self) -> Fund { + let id = self.id_generator.id().abs(); + let dates = self.fund_date_times(); + let fund = ValidVotingTemplateGenerator::next_fund(&mut 
self.template_generator); + + Fund { + id, + fund_name: format!("Fund{}", id), + fund_goal: fund.goal, + fund_start_time: dates.start.unix_timestamp(), + voting_power_threshold: fund.threshold.unwrap().into(), + fund_end_time: dates.end.unix_timestamp(), + next_fund_start_time: dates.next.unix_timestamp(), + registration_snapshot_time: dates.snapshot.unix_timestamp(), + next_registration_snapshot_time: dates.next_snapshot.unix_timestamp(), + chain_vote_plans: vec![self.voteplan_with_fund_id(id.abs())], + challenges: self.challenges_with_fund_id(id.abs()), + stage_dates: FundStageDates { + insight_sharing_start: dates.insight_sharing_start.unix_timestamp(), + proposal_submission_start: dates.proposal_submission_start.unix_timestamp(), + refine_proposals_start: dates.refine_proposals_start.unix_timestamp(), + finalize_proposals_start: dates.finalize_proposals_start.unix_timestamp(), + proposal_assessment_start: dates.proposal_assessment_start.unix_timestamp(), + assessment_qa_start: dates.assessment_qa_start.unix_timestamp(), + snapshot_start: dates.snapshot_start.unix_timestamp(), + voting_start: dates.voting_start.unix_timestamp(), + voting_end: dates.voting_end.unix_timestamp(), + tallying_end: dates.tallying_end.unix_timestamp(), + }, + goals: vec![Goal { + id: 1, + goal_name: "goal1".into(), + fund_id: id.abs(), + }], + results_url: format!("http://localhost/fund/{id}/results/"), + survey_url: format!("http://localhost/fund/{id}/survey/"), + } + } + + fn gen_single_proposal(&mut self, fund: &Fund) -> FullProposalInfo { + let id = self.id_generator.next_u32() as i32; + let proposal = ValidVotingTemplateGenerator::next_proposal(&mut self.template_generator); + let voteplan = fund.chain_vote_plans.first().unwrap(); + let challenge = fund.challenges.first().unwrap(); + let challenge_id = challenge.id; + let challenge_info = self + .template_generator + .proposals_challenge_info(&challenge.challenge_type); + let proposal = Proposal { + internal_id: id.abs(), + proposal_id: id.abs().to_string(), + proposal_category: self.template_generator.proposal_category(), + proposal_title: proposal.proposal_title, + proposal_summary: proposal.proposal_summary, + proposal_public_key: self.id_generator.hash(), + proposal_funds: proposal.proposal_funds.parse().unwrap(), + proposal_url: proposal.proposal_url, + proposal_impact_score: proposal.proposal_impact_score.parse().unwrap(), + reviews_count: 0, + proposal_files_url: proposal.files_url, + proposer: self.template_generator.proposer(), + chain_proposal_id: self.id_generator.hash().as_bytes().to_vec(), + chain_proposal_index: self.id_generator.next_u32() as i64, + chain_vote_options: proposal.chain_vote_options, + chain_voteplan_id: fund + .chain_vote_plans + .get(0) + .unwrap() + .chain_voteplan_id + .clone(), + chain_vote_start_time: voteplan.chain_vote_start_time, + chain_vote_end_time: voteplan.chain_vote_end_time, + chain_committee_end_time: voteplan.chain_committee_end_time, + chain_voteplan_payload: voteplan.chain_voteplan_payload.clone(), + chain_vote_encryption_key: voteplan.chain_vote_encryption_key.clone(), + fund_id: fund.id, + challenge_id, + }; + + FullProposalInfo { + proposal, + challenge_info, + challenge_type: challenge.challenge_type.clone(), + } + } + + fn fund_date_times(&self) -> FundDateTimes { + let range_start_time = OffsetDateTime::now_utc() - Duration::days(10); + let range_end_time = OffsetDateTime::now_utc() + Duration::days(10); + let range_next_start_time = range_end_time + Duration::days(10); + let start = 
rand_datetime_in_range(range_start_time, OffsetDateTime::now_utc()); + let end = rand_datetime_in_range(OffsetDateTime::now_utc(), range_end_time); + let next = rand_datetime_in_range(range_end_time, range_next_start_time); + let snapshot = rand_datetime_in_range(start, end); + let next_snapshot = rand_datetime_in_range(end, end + Duration::days(30)); + + let insight_sharing_start = rand_datetime_in_range(start, end); + let proposal_submission_start = rand_datetime_in_range(insight_sharing_start, end); + let refine_proposals_start = rand_datetime_in_range(proposal_submission_start, end); + let finalize_proposals_start = rand_datetime_in_range(refine_proposals_start, end); + let proposal_assessment_start = rand_datetime_in_range(finalize_proposals_start, end); + let assessment_qa_start = rand_datetime_in_range(finalize_proposals_start, end); + let snapshot_start = rand_datetime_in_range(assessment_qa_start, end); + let voting_start = rand_datetime_in_range(snapshot_start, end); + let voting_end = rand_datetime_in_range(voting_start, end); + let tallying_end = rand_datetime_in_range(voting_end, end); + + FundDateTimes { + start, + end, + next, + snapshot, + next_snapshot, + insight_sharing_start, + proposal_submission_start, + refine_proposals_start, + finalize_proposals_start, + proposal_assessment_start, + assessment_qa_start, + snapshot_start, + voting_start, + voting_end, + tallying_end, + } + } + + fn voteplan_date_times(&self) -> VoteplanDateTimes { + let range_start_time = OffsetDateTime::now_utc() - Duration::days(10); + let range_end_time = OffsetDateTime::now_utc() + Duration::days(10); + let range_tally_time = range_end_time + Duration::days(10); + let start = rand_datetime_in_range(range_start_time, OffsetDateTime::now_utc()); + let end = rand_datetime_in_range(OffsetDateTime::now_utc(), range_end_time); + let tally = rand_datetime_in_range(range_end_time, range_tally_time); + VoteplanDateTimes { start, end, tally } + } + + pub fn voteplans(&mut self, funds: &[Fund]) -> Vec { + funds + .iter() + .map(|x| self.voteplan_with_fund_id(x.id)) + .collect() + } + + pub fn challenges(&mut self, funds: &[Fund]) -> Vec { + funds + .iter() + .map(|x| x.challenges.first().unwrap()) + .cloned() + .collect() + } + + pub fn token(&mut self) -> (String, ApiTokenData) { + self.id_generator.token() + } + + pub fn proposals(&mut self, funds: &[Fund]) -> Vec { + funds.iter().map(|x| self.gen_single_proposal(x)).collect() + } + + pub fn advisor_reviews(&mut self, funds: &[FullProposalInfo]) -> Vec { + funds + .iter() + .map(|x| self.review_with_proposal_id(x.proposal.internal_id)) + .collect() + } + + pub fn goals(&mut self, funds: &[Fund]) -> Vec { + funds + .iter() + .enumerate() + .map(|(i, f)| Goal { + id: i as i32, + goal_name: format!("goal{i}"), + fund_id: f.id, + }) + .collect() + } + + pub fn voteplan_with_fund_id(&mut self, fund_id: i32) -> Voteplan { + let id = self.id_generator.next_u32() as i32; + let dates = self.voteplan_date_times(); + + Voteplan { + id: id.abs(), + chain_voteplan_id: self.id_generator.hash(), + chain_vote_start_time: dates.start.unix_timestamp(), + chain_vote_end_time: dates.end.unix_timestamp(), + chain_committee_end_time: dates.tally.unix_timestamp(), + chain_voteplan_payload: "public".to_string(), + chain_vote_encryption_key: "".to_string(), + fund_id, + } + } + + pub fn challenges_with_fund_id(&mut self, fund_id: i32) -> Vec { + let simple_id = self.id_generator.next_u32() as i32; + let community_choice_id = self.id_generator.next_u32() as i32; + + let 
first_challenge = self.template_generator.next_challenge(); + let second_challenge = self.template_generator.next_challenge(); + + vec![ + Challenge { + internal_id: first_challenge.internal_id, + id: simple_id.abs(), + challenge_type: ChallengeType::Simple, + title: first_challenge.title, + description: first_challenge.description, + rewards_total: first_challenge.rewards_total.parse().unwrap(), + proposers_rewards: first_challenge.proposers_rewards.parse().unwrap(), + fund_id, + challenge_url: self.template_generator.gen_http_address(), + highlights: self.template_generator.gen_highlights(), + }, + Challenge { + internal_id: second_challenge.internal_id, + id: community_choice_id.abs(), + challenge_type: ChallengeType::CommunityChoice, + title: second_challenge.title, + description: second_challenge.description, + rewards_total: second_challenge.rewards_total.parse().unwrap(), + proposers_rewards: second_challenge.proposers_rewards.parse().unwrap(), + fund_id, + challenge_url: self.template_generator.gen_http_address(), + highlights: self.template_generator.gen_highlights(), + }, + ] + } + + pub fn challenge_with_fund_id(&mut self, fund_id: i32) -> Challenge { + let id = self.id_generator.next_u32() as i32; + let challenge = self.template_generator.next_challenge(); + + Challenge { + internal_id: challenge.internal_id, + id: id.abs(), + challenge_type: ChallengeType::CommunityChoice, + title: challenge.title, + description: challenge.description, + rewards_total: challenge.rewards_total.parse().unwrap(), + proposers_rewards: challenge.proposers_rewards.parse().unwrap(), + fund_id, + challenge_url: self.template_generator.gen_http_address(), + highlights: challenge.highlight, + } + } + + pub fn goals_with_fund_id(&mut self, fund_id: i32) -> Vec { + let id = (self.id_generator.next_u32() % (i32::MAX as u32)) as i32; + + vec![ + Goal { + fund_id, + id, + goal_name: "goal1".into(), + }, + Goal { + fund_id, + id, + goal_name: "goal2".into(), + }, + ] + } + + pub fn review_with_proposal_id(&mut self, proposal_id: i32) -> AdvisorReview { + let id = self.id_generator.next_u32() as i32; + let review = (self.template_generator).next_review(); + AdvisorReview { + id, + proposal_id, + assessor: review.assessor, + impact_alignment_rating_given: review.impact_alignment_rating_given, + impact_alignment_note: review.impact_alignment_note, + feasibility_rating_given: review.feasibility_rating_given, + feasibility_note: review.feasibility_note, + auditability_rating_given: review.auditability_rating_given, + auditability_note: review.auditability_note, + ranking: review.ranking, + } + } + + pub fn snapshot(&mut self) -> Snapshot { + let funds = self.funds(); + let voteplans = self.voteplans(&funds); + let challenges = self.challenges(&funds); + let proposals = self.proposals(&funds); + let reviews = self.advisor_reviews(&proposals); + let goals = self.goals(&funds); + let tokens = self.id_generator.tokens(); + + Snapshot::new( + funds, proposals, challenges, tokens, voteplans, reviews, goals, + ) + } +} + +fn rand_datetime_in_range(left: OffsetDateTime, right: OffsetDateTime) -> OffsetDateTime { + use rand::Rng; + let left_timestamp = left.unix_timestamp(); + let right_timestamp = right.unix_timestamp(); + OffsetDateTime::from_unix_timestamp( + rand::thread_rng().gen_range(left_timestamp, right_timestamp), + ) + .unwrap() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/mod.rs 
b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/mod.rs new file mode 100644 index 0000000000..79eae88b20 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/mod.rs @@ -0,0 +1,7 @@ +mod arbitrary; +mod snapshot; +mod voting; + +pub use arbitrary::{ArbitraryGenerator, ArbitrarySnapshotGenerator}; +pub use snapshot::Snapshot; +pub use voting::*; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/snapshot.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/snapshot.rs new file mode 100644 index 0000000000..794d889381 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/snapshot.rs @@ -0,0 +1,107 @@ +use std::collections::HashMap; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::goals::Goal; +use vit_servicing_station_lib_f10::db::models::proposals::FullProposalInfo; +use vit_servicing_station_lib_f10::db::models::{ + api_tokens::ApiTokenData, challenges::Challenge, funds::Fund, voteplans::Voteplan, +}; + +#[derive(Debug, Clone)] +pub struct Snapshot { + funds: Vec, + proposals: Vec, + challenges: Vec, + tokens: HashMap, + voteplans: Vec, + reviews: Vec, + goals: Vec, +} + +impl Snapshot { + pub fn new( + funds: Vec, + proposals: Vec, + challenges: Vec, + tokens: HashMap, + voteplans: Vec, + reviews: Vec, + goals: Vec, + ) -> Self { + Self { + funds, + proposals, + challenges, + tokens, + voteplans, + reviews, + goals, + } + } + + pub fn funds(&self) -> Vec { + self.funds.clone() + } + + pub fn proposals(&self) -> Vec { + self.proposals.clone() + } + + pub fn tokens(&self) -> HashMap { + self.tokens.clone() + } + + pub fn voteplans(&self) -> Vec { + self.voteplans.clone() + } + + pub fn funds_mut(&mut self) -> &mut Vec { + &mut self.funds + } + + pub fn proposals_mut(&mut self) -> &mut Vec { + &mut self.proposals + } + + pub fn voteplans_mut(&mut self) -> &mut Vec { + &mut self.voteplans + } + + pub fn proposal_by_id(&self, id: &str) -> Option<&FullProposalInfo> { + self.proposals + .iter() + .find(|x| x.proposal.proposal_id.eq(id)) + } + + pub fn fund_by_id(&self, id: i32) -> Option<&Fund> { + self.funds.iter().find(|x| x.id == id) + } + + pub fn any_token(&self) -> (String, ApiTokenData) { + let (hash, token) = self.tokens.iter().next().unwrap(); + (hash.to_string(), token.clone()) + } + + pub fn token_hash(&self) -> String { + self.any_token().0 + } + + pub fn challenges(&self) -> Vec { + self.challenges.clone() + } + + pub fn challenges_mut(&mut self) -> &mut Vec { + &mut self.challenges + } + + pub fn advisor_reviews(&self) -> Vec { + self.reviews.clone() + } + + pub fn advisor_reviews_mut(&mut self) -> &mut Vec { + &mut self.reviews + } + + pub fn goals(&self) -> Vec { + self.goals.clone() + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/challenge.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/challenge.rs new file mode 100644 index 0000000000..53ff4bf440 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/challenge.rs @@ -0,0 +1,35 @@ +use super::ProposalConfig; +use std::collections::VecDeque; + +#[derive(Debug, Clone, Default)] +pub struct ChallengeConfig 
{ + pub(crate) proposals: VecDeque, + pub(crate) rewards_total: Option, + pub(crate) proposers_rewards: Option, +} + +impl ChallengeConfig { + pub fn proposals(mut self, proposals: Vec) -> Self { + self.proposals = VecDeque::from(proposals); + self + } + + pub fn proposals_len(&self) -> usize { + self.proposals.len() + } + + pub fn proposal(mut self, proposal: ProposalConfig) -> Self { + self.proposals.push_back(proposal); + self + } + + pub fn rewards_total(mut self, rewards_total: u64) -> Self { + self.rewards_total = Some(rewards_total); + self + } + + pub fn proposers_rewards(mut self, proposers_rewards: u64) -> Self { + self.proposers_rewards = Some(proposers_rewards); + self + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/mod.rs new file mode 100644 index 0000000000..e5b2736a2b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/mod.rs @@ -0,0 +1,139 @@ +mod challenge; +mod proposal; + +use crate::common::data::ArbitraryValidVotingTemplateGenerator; +use crate::common::data::ChallengeTemplate; +use crate::common::data::FundTemplate; +use crate::common::data::ProposalTemplate; +use crate::common::data::ReviewTemplate; +use crate::common::data::ValidVotingTemplateGenerator; +pub use challenge::ChallengeConfig; +pub use proposal::ProposalConfig; + +#[derive(Clone, Default)] +pub struct ArbitraryValidVotePlanConfig { + template_generator: ArbitraryValidVotingTemplateGenerator, + challenges: Vec, +} + +impl ArbitraryValidVotePlanConfig { + pub fn challenges(mut self, challenges: Vec) -> Self { + for challenge in challenges.into_iter() { + self = self.challenge(challenge); + } + self + } + + pub fn get_challenges(&self) -> &[ChallengeConfig] { + &self.challenges + } + + pub fn challenge(mut self, mut challenge: ChallengeConfig) -> Self { + challenge + .proposals + .iter_mut() + .enumerate() + .for_each(|(i, p)| { + p.challenge_id = Some(i); + }); + self.challenges.push(challenge); + self + } + + pub fn pop_proposal(&mut self) -> ProposalConfig { + for challenge in self.challenges.iter_mut() { + if let Some(proposal) = challenge.proposals.pop_front() { + return proposal; + } + } + panic!("no more proposals"); + } +} + +impl ValidVotingTemplateGenerator for ArbitraryValidVotePlanConfig { + fn next_proposal(&mut self) -> ProposalTemplate { + let proposals_builder = self.pop_proposal(); + let challenge = self + .template_generator + .challenges + .get( + proposals_builder + .challenge_id + .expect("internal error: no challenge id set for proposal"), + ) + .unwrap() + .clone(); + + let funds = proposals_builder + .funds + .unwrap_or_else(|| self.template_generator.proposal_fund()); + let proposal_template = self.template_generator.proposal(challenge, funds); + self.template_generator + .proposals + .push(proposal_template.clone()); + proposal_template + } + + fn next_challenge(&mut self) -> ChallengeTemplate { + let challenge_builder = self + .challenges + .get((self.template_generator.next_challenge_id - 1) as usize) + .expect("no more challenges"); + let mut challenge = self.template_generator.next_challenge(); + if let Some(rewards_total) = challenge_builder.rewards_total { + challenge.rewards_total = rewards_total.to_string(); + } + if let Some(proposers_rewards) = challenge_builder.proposers_rewards { + challenge.proposers_rewards = 
proposers_rewards.to_string(); + } + challenge + } + + fn next_fund(&mut self) -> FundTemplate { + self.template_generator.next_fund() + } + + fn next_review(&mut self) -> ReviewTemplate { + self.template_generator.next_review() + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::common::data::ArbitraryGenerator; + use crate::common::data::ValidVotePlanGenerator; + use chain_impl_mockchain::testing::scenario::template::ProposalDefBuilder; + use chain_impl_mockchain::testing::scenario::template::VotePlanDefBuilder; + use fake::faker::name::en::Name; + use fake::Fake; + + #[test] + pub fn valid_vote_plan_template_builder() { + let mut vote_plan_parameters = ArbitraryGenerator::default().valid_vote_plan_parameters(); + + let mut vote_plan_builder = VotePlanDefBuilder::new("fund_x"); + vote_plan_builder.owner(&Name().fake::()); + vote_plan_builder.vote_phases(1, 2, 3); + + let mut proposal_builder = ProposalDefBuilder::new( + chain_impl_mockchain::testing::VoteTestGen::external_proposal_id(), + ); + proposal_builder.options(2); + proposal_builder.action_off_chain(); + vote_plan_builder.with_proposal(&mut proposal_builder); + vote_plan_parameters.current_fund.vote_plans = vec![vote_plan_builder.build().into()]; + vote_plan_parameters.current_fund.challenges_count = 1; + + let mut template = ArbitraryValidVotePlanConfig::default().challenge( + ChallengeConfig::default() + .rewards_total(1000) + .proposers_rewards(1000) + .proposal(ProposalConfig::default().funds(100)), + ); + let mut generator = ValidVotePlanGenerator::new(vote_plan_parameters); + let snapshot = generator.build(&mut template); + + println!("{:?}", snapshot); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/proposal.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/proposal.rs new file mode 100644 index 0000000000..13fa04b5f6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/builder/proposal.rs @@ -0,0 +1,12 @@ +#[derive(Debug, Clone, Default)] +pub struct ProposalConfig { + pub(crate) funds: Option, + pub(crate) challenge_id: Option, +} + +impl ProposalConfig { + pub fn funds(mut self, funds: i64) -> Self { + self.funds = Some(funds); + self + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/generator.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/generator.rs new file mode 100644 index 0000000000..c1ab00223b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/generator.rs @@ -0,0 +1,209 @@ +use super::parameters::SingleVotePlanParameters; +use crate::common::data::generator::{ArbitraryGenerator, Snapshot, ValidVotingTemplateGenerator}; +use crate::common::data::ValidVotePlanParameters; +use chain_impl_mockchain::certificate::VotePlan; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::proposals::FullProposalInfo; +use vit_servicing_station_lib_f10::db::models::{ + challenges::Challenge, + funds::Fund, + proposals::{Category, Proposal, Proposer}, + voteplans::Voteplan, +}; + +pub struct ValidVotePlanGenerator { + parameters: ValidVotePlanParameters, +} + +impl ValidVotePlanGenerator { + pub fn new(parameters: ValidVotePlanParameters) -> Self { + Self { 
parameters } + } + + fn convert_to_vote_plan(single_vote_plan: &SingleVotePlanParameters) -> VotePlan { + single_vote_plan.vote_plan().into() + } + + pub fn build(&mut self, template_generator: &mut dyn ValidVotingTemplateGenerator) -> Snapshot { + let mut generator = ArbitraryGenerator::new(); + + let fund_template = template_generator.next_fund(); + self.parameters.current_fund.info.fund_goal = fund_template.goal; + + let vote_plans: Vec = self + .parameters + .current_fund + .vote_plans + .iter() + .map(|single_vote_plan| { + let vote_plan = Self::convert_to_vote_plan(single_vote_plan); + + let payload_type = match vote_plan.payload_type() { + chain_impl_mockchain::vote::PayloadType::Public => "public", + chain_impl_mockchain::vote::PayloadType::Private => "private", + }; + + Voteplan { + id: generator.id(), + chain_voteplan_id: vote_plan.to_id().to_string(), + chain_vote_start_time: self.parameters.current_fund.info.dates.voting_start, + chain_vote_end_time: self.parameters.current_fund.info.dates.voting_tally_start, + chain_committee_end_time: self + .parameters + .current_fund + .info + .dates + .voting_tally_end, + chain_voteplan_payload: payload_type.to_string(), + chain_vote_encryption_key: single_vote_plan + .vote_encryption_key() + .unwrap_or_default(), + fund_id: self.parameters.current_fund.info.fund_id, + } + }) + .collect(); + + let challenges: Vec = std::iter::from_fn(|| { + let challenge_data = template_generator.next_challenge(); + Some(Challenge { + internal_id: challenge_data.internal_id, + id: challenge_data.id.parse().unwrap(), + challenge_type: challenge_data.challenge_type, + title: challenge_data.title, + description: challenge_data.description, + rewards_total: challenge_data.rewards_total.parse().unwrap(), + proposers_rewards: challenge_data.proposers_rewards.parse().unwrap(), + fund_id: self.parameters.current_fund.info.fund_id, + challenge_url: challenge_data.challenge_url, + highlights: challenge_data.highlight, + }) + }) + .take(self.parameters.current_fund.challenges_count) + .collect(); + + let mut fund = self + .parameters + .current_fund + .to_fund(vote_plans.clone(), challenges); + + let mut proposals = vec![]; + + for (index, vote_plan) in vote_plans.iter().enumerate() { + for (index, proposal) in self.parameters.current_fund.vote_plans[index] + .proposals() + .iter() + .enumerate() + { + let proposal_template = template_generator.next_proposal(); + let challenge_idx: i32 = proposal_template.challenge_id.unwrap().parse().unwrap(); + let challenge = fund + .challenges + .iter_mut() + .find(|x| x.id == challenge_idx) + .unwrap_or_else(|| { + panic!( + "Cannot find challenge with id: {}. 
Please set more challenges", + challenge_idx + ) + }); + let proposal_funds = proposal_template.proposal_funds.parse().unwrap(); + + if self + .parameters + .current_fund + .calculate_challenges_total_funds + { + challenge.rewards_total += proposal_funds; + } + + let proposal = Proposal { + internal_id: proposal_template.internal_id.parse().unwrap(), + proposal_id: proposal_template.proposal_id.to_string(), + proposal_category: Category { + category_id: "".to_string(), + category_name: proposal_template.category_name, + category_description: "".to_string(), + }, + proposal_title: proposal_template.proposal_title, + proposal_summary: proposal_template.proposal_summary, + proposal_public_key: generator.hash(), + proposal_funds, + proposal_url: proposal_template.proposal_url.clone(), + proposal_impact_score: proposal_template.proposal_impact_score.parse().unwrap(), + reviews_count: 0, + proposal_files_url: proposal_template.files_url, + proposer: Proposer { + proposer_name: proposal_template.proposer_name, + proposer_email: "".to_string(), + proposer_url: proposal_template.proposer_url, + proposer_relevant_experience: proposal_template + .proposer_relevant_experience, + }, + chain_proposal_id: proposal.id().to_string().as_bytes().to_vec(), + chain_proposal_index: index as i64, + chain_vote_options: self.parameters.current_fund.vote_options.clone(), + chain_voteplan_id: vote_plan.chain_voteplan_id.clone(), + chain_vote_start_time: vote_plan.chain_vote_start_time, + chain_vote_end_time: vote_plan.chain_vote_end_time, + chain_committee_end_time: vote_plan.chain_committee_end_time, + chain_voteplan_payload: vote_plan.chain_voteplan_payload.clone(), + chain_vote_encryption_key: vote_plan.chain_vote_encryption_key.clone(), + fund_id: fund.id, + challenge_id: challenge.id, + }; + + proposals.push(FullProposalInfo { + proposal, + challenge_info: proposal_template.proposal_challenge_info, + challenge_type: challenge.challenge_type.clone(), + }); + } + } + let challenges = fund.challenges.clone(); + + let reviews: Vec = std::iter::from_fn(|| { + let review_data = template_generator.next_review(); + + Some(AdvisorReview { + id: review_data + .id + .unwrap_or_else(|| 0i32.to_string()) + .parse() + .unwrap(), + proposal_id: review_data.proposal_id.parse().unwrap(), + assessor: review_data.assessor, + impact_alignment_rating_given: review_data.impact_alignment_rating_given, + impact_alignment_note: review_data.impact_alignment_note, + feasibility_rating_given: review_data.feasibility_rating_given, + feasibility_note: review_data.feasibility_note, + auditability_rating_given: review_data.auditability_rating_given, + auditability_note: review_data.auditability_note, + ranking: review_data.ranking, + }) + }) + .take(self.parameters.current_fund.reviews_count) + .collect(); + + let goals = fund.goals.clone(); + + let mut funds = vec![fund]; + let next_funds: Vec = self + .parameters + .next_funds + .iter() + .cloned() + .map(Into::into) + .collect(); + funds.extend(next_funds); + + Snapshot::new( + funds, + proposals, + challenges, + generator.tokens(), + vote_plans, + reviews, + goals, + ) + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/mod.rs new file mode 100644 index 0000000000..b9d7f16fa4 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/mod.rs @@ -0,0 +1,15 @@ +mod 
builder;
+mod generator;
+mod parameters;
+mod template;
+
+pub use builder::{ArbitraryValidVotePlanConfig, ChallengeConfig, ProposalConfig};
+pub use generator::ValidVotePlanGenerator;
+pub use parameters::{
+    CurrentFund, FundDates, FundInfo, SingleVotePlanParameters, ValidVotePlanParameters,
+};
+pub use template::{
+    parse_challenges, parse_funds, parse_proposals, parse_reviews,
+    ArbitraryValidVotingTemplateGenerator, ChallengeTemplate, ExternalValidVotingTemplateGenerator,
+    FundTemplate, ProposalTemplate, ReviewTemplate, TemplateLoad, ValidVotingTemplateGenerator,
+};
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/current.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/current.rs
new file mode 100644
index 0000000000..01f4472426
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/current.rs
@@ -0,0 +1,63 @@
+use super::FundInfo;
+use crate::common::data::SingleVotePlanParameters;
+use chain_impl_mockchain::testing::scenario::template::VotePlanDef;
+use vit_servicing_station_lib_f10::db::models::challenges::Challenge;
+use vit_servicing_station_lib_f10::db::models::funds::Fund;
+use vit_servicing_station_lib_f10::db::models::vote_options::VoteOptions;
+
+use vit_servicing_station_lib_f10::db::models::voteplans::Voteplan;
+
+pub struct CurrentFund {
+    pub vote_plans: Vec<SingleVotePlanParameters>,
+    pub vote_options: VoteOptions,
+    pub challenges_count: usize,
+    pub reviews_count: usize,
+    pub calculate_challenges_total_funds: bool,
+    pub info: FundInfo,
+}
+
+impl CurrentFund {
+    pub fn from_single(vote_plan: VotePlanDef, info: FundInfo) -> Self {
+        Self::new(vec![vote_plan], info)
+    }
+
+    pub fn new(vote_plans: Vec<VotePlanDef>, info: FundInfo) -> Self {
+        Self {
+            vote_plans: vote_plans.into_iter().map(Into::into).collect(),
+            info,
+            vote_options: VoteOptions::parse_coma_separated_value("yes,no"),
+            challenges_count: 4,
+            reviews_count: 1,
+            calculate_challenges_total_funds: false,
+        }
+    }
+
+    pub fn set_vote_encryption_key(&mut self, vote_encryption_key: String, alias: &str) {
+        let vote_plan = self
+            .vote_plans
+            .iter_mut()
+            .find(|x| x.alias() == alias)
+            .unwrap();
+        vote_plan.set_vote_encryption_key(vote_encryption_key);
+    }
+
+    pub fn to_fund(&self, vote_plans: Vec<Voteplan>, challenges: Vec<Challenge>) -> Fund {
+        Fund {
+            id: self.info.fund_id,
+            fund_name: self.info.fund_name.clone(),
+            fund_goal: self.info.fund_goal.clone(),
+            voting_power_threshold: self.info.voting_power_threshold,
+            fund_start_time: self.info.dates.voting_start,
+            fund_end_time: self.info.dates.voting_tally_start,
+            next_fund_start_time: self.info.dates.next_fund_start_time,
+            registration_snapshot_time: self.info.dates.registration_snapshot_time,
+            next_registration_snapshot_time: self.info.dates.next_registration_snapshot_time,
+            chain_vote_plans: vote_plans,
+            challenges,
+            stage_dates: self.info.dates.clone().into(),
+            goals: self.info.goals.clone(),
+            results_url: self.info.results_url.clone(),
+            survey_url: self.info.survey_url.clone(),
+        }
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/dates.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/dates.rs
new file mode 100644
index 0000000000..57bd382196
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/dates.rs
@@ -0,0 +1,60 @@
+use crate::common::data::generator::voting::parameters::FundStageDates;
+use time::{ext::NumericalDuration, OffsetDateTime};
+
+#[derive(Debug, Clone)]
+pub struct FundDates {
+    pub voting_start: i64,
+    pub voting_tally_start: i64,
+    pub voting_tally_end: i64,
+    pub next_fund_start_time: i64,
+    pub registration_snapshot_time: i64,
+    pub next_registration_snapshot_time: i64,
+    pub insight_sharing_start: i64,
+    pub proposal_submission_start: i64,
+    pub refine_proposals_start: i64,
+    pub finalize_proposals_start: i64,
+    pub proposal_assessment_start: i64,
+    pub assessment_qa_start: i64,
+}
+
+impl Default for FundDates {
+    fn default() -> Self {
+        let now = OffsetDateTime::now_utc();
+        Self {
+            proposal_submission_start: as_timestamp(now - 10.days()),
+            insight_sharing_start: as_timestamp(now - 9.days()),
+            refine_proposals_start: as_timestamp(now - 8.days()),
+            finalize_proposals_start: as_timestamp(now - 7.days()),
+            proposal_assessment_start: as_timestamp(now - 6.days()),
+            assessment_qa_start: as_timestamp(now - 5.days()),
+            registration_snapshot_time: as_timestamp(now - 4.days()),
+            voting_start: as_timestamp(now + 1.days()),
+            voting_tally_start: as_timestamp(now + 2.days()),
+            voting_tally_end: as_timestamp(now + 3.days()),
+            next_registration_snapshot_time: as_timestamp(now + 7.days()),
+            next_fund_start_time: as_timestamp(now + 10.days()),
+        }
+    }
+}
+
+fn as_timestamp(date: OffsetDateTime) -> i64 {
+    date.unix_timestamp()
+}
+
+#[allow(clippy::from_over_into)]
+impl Into<FundStageDates> for FundDates {
+    fn into(self) -> FundStageDates {
+        FundStageDates {
+            insight_sharing_start: self.insight_sharing_start,
+            proposal_submission_start: self.proposal_submission_start,
+            refine_proposals_start: self.refine_proposals_start,
+            finalize_proposals_start: self.finalize_proposals_start,
+            proposal_assessment_start: self.proposal_assessment_start,
+            assessment_qa_start: self.assessment_qa_start,
+            snapshot_start: self.registration_snapshot_time,
+            voting_start: self.voting_start,
+            voting_end: self.voting_tally_start,
+            tallying_end: self.voting_tally_end,
+        }
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/info.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/info.rs
new file mode 100644
index 0000000000..d758079138
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/info.rs
@@ -0,0 +1,65 @@
+use super::dates::FundDates;
+use vit_servicing_station_lib_f10::db::models::{funds::Fund, goals::Goal};
+
+#[derive(Debug, Clone)]
+pub struct FundInfo {
+    pub fund_name: String,
+    pub fund_goal: String,
+    pub fund_id: i32,
+    pub voting_power_threshold: i64,
+    pub dates: FundDates,
+    pub goals: Vec<Goal>,
+    pub results_url: String,
+    pub survey_url: String,
+}
+
+impl From<FundDates> for FundInfo {
+    fn from(dates: FundDates) -> Self {
+        FundInfo {
+            dates,
+            ..Default::default()
+        }
+    }
+}
+
+#[allow(clippy::from_over_into)]
+impl Into<Fund> for FundInfo {
+    fn into(self) -> Fund {
+        Fund {
+            id: self.fund_id,
+            fund_name: self.fund_name,
+            fund_goal: self.fund_goal,
+            voting_power_threshold: self.voting_power_threshold,
+            fund_start_time: self.dates.voting_start,
+            fund_end_time: self.dates.voting_tally_start,
+            next_fund_start_time: self.dates.next_fund_start_time,
+            registration_snapshot_time: self.dates.registration_snapshot_time,
+            next_registration_snapshot_time: self.dates.next_registration_snapshot_time,
+            chain_vote_plans: vec![],
+            challenges: vec![],
+            stage_dates: self.dates.into(),
+            goals: self.goals,
+            results_url: self.results_url,
+            survey_url: self.survey_url,
+        }
+    }
+}
+
+impl Default for FundInfo {
+    fn default() -> Self {
+        Self {
+            fund_name: "fund1".to_string(),
+            fund_id: 1,
+            fund_goal: "".to_string(),
+            voting_power_threshold: 500,
+            dates: Default::default(),
+            goals: vec![Goal {
+                id: 1,
+                goal_name: "goal1".to_string(),
+                fund_id: 1,
+            }],
+            results_url: "http://localhost/fund/1/results/".to_string(),
+            survey_url: "http://localhost/fund/1/survey/".to_string(),
+        }
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/mod.rs
new file mode 100644
index 0000000000..59faeb8159
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/fund/mod.rs
@@ -0,0 +1,7 @@
+mod current;
+mod dates;
+mod info;
+
+pub use current::CurrentFund;
+pub use dates::FundDates;
+pub use info::FundInfo;
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/mod.rs
new file mode 100644
index 0000000000..f5267cf69c
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/mod.rs
@@ -0,0 +1,20 @@
+mod fund;
+mod vote_plan;
+
+pub use fund::{CurrentFund, FundDates, FundInfo};
+use vit_servicing_station_lib_f10::db::models::funds::FundStageDates;
+pub use vote_plan::SingleVotePlanParameters;
+
+pub struct ValidVotePlanParameters {
+    pub current_fund: CurrentFund,
+    pub next_funds: Vec<FundInfo>,
+}
+
+impl From<CurrentFund> for ValidVotePlanParameters {
+    fn from(current_fund: CurrentFund) -> Self {
+        Self {
+            current_fund,
+            next_funds: Vec::new(),
+        }
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/vote_plan.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/vote_plan.rs
new file mode 100644
index 0000000000..9f2b23dece
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/parameters/vote_plan.rs
@@ -0,0 +1,37 @@
+use chain_impl_mockchain::testing::scenario::template::{ProposalDef, VotePlanDef};
+
+pub struct SingleVotePlanParameters {
+    vote_plan: VotePlanDef,
+    vote_encryption_key: Option<String>,
+}
+
+impl SingleVotePlanParameters {
+    pub fn proposals(&self) -> Vec<ProposalDef> {
+        self.vote_plan.proposals()
+    }
+
+    pub fn alias(&self) -> String {
+        self.vote_plan.alias()
+    }
+
+    pub fn vote_plan(&self) -> VotePlanDef {
+        self.vote_plan.clone()
+    }
+
+    pub fn vote_encryption_key(&self) -> Option<String> {
+        self.vote_encryption_key.clone()
+    }
+
+    pub fn set_vote_encryption_key(&mut self, vote_encryption_key: String) {
+        self.vote_encryption_key = Some(vote_encryption_key);
+    }
+}
+
+impl From<VotePlanDef> for SingleVotePlanParameters {
+    fn from(vote_plan: VotePlanDef) -> Self {
+        Self {
+            vote_plan,
+            vote_encryption_key: None,
+        }
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/arbitrary.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/arbitrary.rs
new file mode 100644
index 0000000000..bab6c200a7
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/arbitrary.rs
@@ -0,0 +1,247 @@
+use super::{
+    ChallengeTemplate, FundTemplate, ProposalTemplate, ReviewTemplate, ValidVotingTemplateGenerator,
+};
+use crate::common::data::ArbitraryGenerator;
+use fake::faker::company::en::CompanyName;
+use fake::faker::internet::en::DomainSuffix;
+use fake::faker::internet::en::SafeEmail;
+use fake::{
+    faker::lorem::en::*,
+    faker::{
+        company::en::{Buzzword, CatchPhase, Industry},
+        name::en::Name,
+    },
+    Fake,
+};
+use vit_servicing_station_lib_f10::db::models::challenges::ChallengeHighlights;
+use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::ReviewRanking;
+use vit_servicing_station_lib_f10::db::models::proposals::community_choice::ChallengeInfo as CommunityChoiceChallengeInfo;
+use vit_servicing_station_lib_f10::db::models::proposals::simple::ChallengeInfo as SimpleChallengeInfo;
+use vit_servicing_station_lib_f10::db::models::proposals::Category;
+use vit_servicing_station_lib_f10::db::models::proposals::ChallengeType;
+use vit_servicing_station_lib_f10::db::models::proposals::ProposalChallengeInfo;
+use vit_servicing_station_lib_f10::db::models::proposals::Proposer;
+use vit_servicing_station_lib_f10::db::models::vote_options::VoteOptions;
+
+#[derive(Clone)]
+pub struct ArbitraryValidVotingTemplateGenerator {
+    pub(crate) generator: ArbitraryGenerator,
+    pub(crate) funds: Vec<FundTemplate>,
+    pub(crate) challenges: Vec<ChallengeTemplate>,
+    pub(crate) proposals: Vec<ProposalTemplate>,
+    pub(crate) reviews: Vec<ReviewTemplate>,
+    pub(crate) next_proposal_id: i32,
+    pub(crate) next_challenge_id: i32,
+    pub(crate) next_review_id: i32,
+}
+
+impl Default for ArbitraryValidVotingTemplateGenerator {
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
+impl ArbitraryValidVotingTemplateGenerator {
+    pub fn new() -> Self {
+        Self {
+            generator: ArbitraryGenerator::new(),
+            next_proposal_id: 1,
+            next_challenge_id: 1,
+            next_review_id: 1,
+            funds: Vec::new(),
+            challenges: Vec::new(),
+            proposals: Vec::new(),
+            reviews: Vec::new(),
+        }
+    }
+
+    pub fn next_challenge_id(&mut self) -> i32 {
+        let ret = self.next_challenge_id;
+        self.next_challenge_id = ret + 1;
+        ret
+    }
+
+    pub fn next_proposal_id(&mut self) -> i32 {
+        let ret = self.next_proposal_id;
+        self.next_proposal_id = ret + 1;
+        ret
+    }
+
+    pub fn next_review_id(&mut self) -> i32 {
+        let ret = self.next_review_id;
+        self.next_review_id = ret + 1;
+        ret
+    }
+
+    pub fn gen_http_address(&self) -> String {
+        format!(
+            "http://{}.{}",
+            CompanyName()
+                .fake::<String>()
+                .to_lowercase()
+                .replace(' ', "-"),
+            DomainSuffix().fake::<String>()
+        )
+    }
+
+    pub fn gen_highlights(&mut self) -> Option<ChallengeHighlights> {
+        match self.generator.next_u32() % 2 {
+            0 => None,
+            _ => Some(ChallengeHighlights {
+                sponsor: CompanyName().fake::<String>(),
+            }),
+        }
+    }
+
+    pub fn proposer(&mut self) -> Proposer {
+        Proposer {
+            proposer_relevant_experience: Buzzword().fake::<String>(),
+            proposer_name: Name().fake::<String>(),
+            proposer_email: SafeEmail().fake::<String>(),
+            proposer_url: self.gen_http_address(),
+        }
+    }
+    // impact score [1.00-4.99]
+    pub fn impact_score(&mut self) -> i64 {
+        (self.generator.next_u64() % 400 + 100) as i64
+    }
+
+    pub fn proposal_category(&mut self) -> Category {
+        Category {
+            category_id: "".to_string(),
+            category_name: Industry().fake::<String>(),
+            category_description: "".to_string(),
+        }
+    }
+
+    pub fn proposal_fund(&mut self) -> i64 {
+        (self.generator.next_u64() % 200_000 + 5000) as i64
+    }
+
+    pub fn challenge_type(&mut self) -> ChallengeType {
+        match self.generator.next_u32() % 3 {
+            0 => ChallengeType::Simple,
+            1 => ChallengeType::CommunityChoice,
+            2 => ChallengeType::Native,
+            _ => unreachable!(),
+        }
+    }
+
+    pub fn proposals_challenge_info(
+        &mut self,
+        challenge_type: &ChallengeType,
+    ) -> ProposalChallengeInfo {
+        match challenge_type {
+            ChallengeType::Simple | ChallengeType::Native => {
+                ProposalChallengeInfo::Simple(SimpleChallengeInfo {
+                    proposal_solution: CatchPhase().fake::<String>(),
+                })
+            }
+            ChallengeType::CommunityChoice => {
+                ProposalChallengeInfo::CommunityChoice(CommunityChoiceChallengeInfo {
+                    proposal_brief: CatchPhase().fake::<String>(),
+                    proposal_importance: CatchPhase().fake::<String>(),
+                    proposal_goal: CatchPhase().fake::<String>(),
+                    proposal_metrics: CatchPhase().fake::<String>(),
+                })
+            }
+        }
+    }
+
+    pub fn proposal(&mut self, challenge: ChallengeTemplate, funds: i64) -> ProposalTemplate {
+        let proposal_url = self.gen_http_address();
+        let challenge_type = challenge.challenge_type.clone();
+        let proposal_challenge_info = self.proposals_challenge_info(&challenge_type);
+        ProposalTemplate {
+            proposal_id: self.next_proposal_id().to_string(),
+            internal_id: self.generator.id().to_string(),
+            category_name: Industry().fake::<String>(),
+            proposal_title: CatchPhase().fake::<String>(),
+            proposal_summary: CatchPhase().fake::<String>(),
+
+            proposal_funds: funds.to_string(),
+            proposal_url: proposal_url.to_string(),
+            proposal_impact_score: self.impact_score().to_string(),
+            files_url: format!("{}/files", proposal_url),
+            proposer_relevant_experience: self.proposer().proposer_relevant_experience,
+            chain_vote_options: VoteOptions::parse_coma_separated_value("yes,no"),
+            proposer_name: Name().fake::<String>(),
+            proposer_url: self.gen_http_address(),
+            chain_vote_type: "public".to_string(),
+            challenge_id: Some(challenge.id),
+            challenge_type,
+            proposal_challenge_info,
+        }
+    }
+}
+
+impl ValidVotingTemplateGenerator for ArbitraryValidVotingTemplateGenerator {
+    fn next_proposal(&mut self) -> ProposalTemplate {
+        let challenge = self
+            .challenges
+            .get(self.generator.random_index(self.challenges.len()))
+            .unwrap()
+            .clone();
+
+        let funds = self.proposal_fund();
+        let proposal_template = self.proposal(challenge, funds);
+        self.proposals.push(proposal_template.clone());
+        proposal_template
+    }
+
+    fn next_challenge(&mut self) -> ChallengeTemplate {
+        let challenge = ChallengeTemplate {
+            internal_id: self.next_challenge_id(),
+            id: self.generator.id().to_string(),
+            challenge_type: self.challenge_type(),
+            title: CatchPhase().fake::<String>(),
+            description: Buzzword().fake::<String>(),
+            rewards_total: (self.generator.next_u32() % 10000).to_string(),
+            proposers_rewards: (self.generator.next_u32() % 10000).to_string(),
+            challenge_url: self.gen_http_address(),
+            fund_id: None,
+            highlight: self.gen_highlights(),
+        };
+        self.challenges.push(challenge.clone());
+        challenge
+    }
+
+    fn next_fund(&mut self) -> FundTemplate {
+        let fund = FundTemplate {
+            id: self.generator.id().abs(),
+            goal: "How will we encourage developers and entrepreneurs to build Dapps and businesses on top of Cardano in the next 6 months?".to_string(),
+            rewards_info: Sentence(3..5).fake::<String>(),
+            threshold: Some(self.generator.next_u32()),
+        };
+        self.funds.push(fund.clone());
+        fund
+    }
+
+    fn next_review(&mut self) -> ReviewTemplate {
+        let
proposal_id = self + .proposals + .get(self.generator.random_index(self.proposals.len())) + .map(|proposal| proposal.proposal_id.clone()) + .unwrap(); + let ranking = match self.generator.next_u32() % 2 { + 0 => ReviewRanking::Excellent, + 1 => ReviewRanking::Good, + _ => unreachable!("do not generate other review types for now"), + }; + + let review = ReviewTemplate { + id: Some(self.next_review_id().to_string()), + proposal_id, + assessor: Name().fake::(), + impact_alignment_rating_given: (self.generator.next_u32() % 5) as i32, + impact_alignment_note: fake::faker::lorem::en::Sentence(0..100).fake::(), + feasibility_rating_given: (self.generator.next_u32() % 5) as i32, + feasibility_note: fake::faker::lorem::en::Sentence(0..100).fake::(), + auditability_rating_given: (self.generator.next_u32() % 5) as i32, + auditability_note: fake::faker::lorem::en::Sentence(0..100).fake::(), + ranking, + }; + self.reviews.push(review.clone()); + review + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/external.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/external.rs new file mode 100644 index 0000000000..ff7d20db1b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/external.rs @@ -0,0 +1,99 @@ +use super::{ + ChallengeTemplate, FundTemplate, ProposalTemplate, ReviewTemplate, ValidVotingTemplateGenerator, +}; +use std::{collections::LinkedList, path::PathBuf}; +use thiserror::Error; + +impl ValidVotingTemplateGenerator for ExternalValidVotingTemplateGenerator { + fn next_proposal(&mut self) -> ProposalTemplate { + self.proposals + .pop_front() + .unwrap_or_else(|| panic!("no more proposals")) + } + + fn next_challenge(&mut self) -> ChallengeTemplate { + self.challenges + .pop_front() + .unwrap_or_else(|| panic!("no more challenges")) + } + + fn next_fund(&mut self) -> FundTemplate { + self.funds + .pop_front() + .unwrap_or_else(|| panic!("no more funds")) + } + + fn next_review(&mut self) -> ReviewTemplate { + self.reviews + .pop_front() + .unwrap_or_else(|| panic!("no more reviews")) + } +} + +#[derive(Clone)] +pub struct ExternalValidVotingTemplateGenerator { + proposals: LinkedList, + challenges: LinkedList, + funds: LinkedList, + reviews: LinkedList, +} + +impl ExternalValidVotingTemplateGenerator { + pub fn new( + proposals: PathBuf, + challenges: PathBuf, + funds: PathBuf, + reviews: PathBuf, + ) -> Result { + Ok(Self { + proposals: parse_proposals(proposals)?, + challenges: parse_challenges(challenges)?, + funds: parse_funds(funds)?, + reviews: parse_reviews(reviews)?, + }) + } + + pub fn proposals_count(&self) -> usize { + self.proposals.len() + } + + pub fn challenges_count(&self) -> usize { + self.challenges.len() + } +} + +pub fn parse_proposals(proposals: PathBuf) -> Result, TemplateLoad> { + serde_json::from_str(&std::fs::read_to_string(proposals)?) + .map_err(|err| TemplateLoad::Proposal(err.to_string())) +} + +pub fn parse_challenges( + challenges: PathBuf, +) -> Result, TemplateLoad> { + serde_json::from_str(&std::fs::read_to_string(challenges)?) + .map_err(|err| TemplateLoad::Challenge(err.to_string())) +} + +pub fn parse_funds(funds: PathBuf) -> Result, TemplateLoad> { + serde_json::from_str(&std::fs::read_to_string(funds)?) 
+ .map_err(|err| TemplateLoad::Fund(err.to_string())) +} + +pub fn parse_reviews(reviews: PathBuf) -> Result, TemplateLoad> { + serde_json::from_str(&std::fs::read_to_string(reviews)?) + .map_err(|err| TemplateLoad::Review(err.to_string())) +} + +#[derive(Debug, Error)] +pub enum TemplateLoad { + #[error("cannot parse proposals, due to {0}")] + Proposal(String), + #[error("cannot parse challenges, due to: {0}")] + Challenge(String), + #[error("cannot parse funds, due to: {0}")] + Fund(String), + #[error("cannot parse reviews, due to: {0}")] + Review(String), + #[error(transparent)] + Io(#[from] std::io::Error), +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/mod.rs new file mode 100644 index 0000000000..f7b6d017c1 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/generator/voting/template/mod.rs @@ -0,0 +1,89 @@ +mod arbitrary; +mod external; + +pub use arbitrary::ArbitraryValidVotingTemplateGenerator; +pub use external::{ + parse_challenges, parse_funds, parse_proposals, parse_reviews, + ExternalValidVotingTemplateGenerator, TemplateLoad, +}; +use serde::{Deserialize, Serialize}; +use vit_servicing_station_lib_f10::db::models::challenges::ChallengeHighlights; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::ReviewRanking; +use vit_servicing_station_lib_f10::db::models::proposals::{ChallengeType, ProposalChallengeInfo}; +use vit_servicing_station_lib_f10::db::models::vote_options::VoteOptions; + +#[derive(Serialize, Deserialize, Clone)] +pub struct FundTemplate { + pub id: i32, + pub goal: String, + pub rewards_info: String, + pub threshold: Option, +} +#[derive(Serialize, Deserialize, Clone)] +pub struct ProposalTemplate { + pub internal_id: String, + pub category_name: String, + pub proposal_id: String, + pub proposal_title: String, + #[serde(default)] + pub proposal_summary: String, + pub proposal_funds: String, + pub proposal_url: String, + pub proposal_impact_score: String, + #[serde(default)] + pub files_url: String, + pub proposer_name: String, + #[serde(default)] + pub proposer_url: String, + #[serde(default)] + pub proposer_relevant_experience: String, + #[serde( + deserialize_with = "vit_servicing_station_lib_f10::utils::serde::deserialize_vote_options_from_string" + )] + pub chain_vote_options: VoteOptions, + pub chain_vote_type: String, + pub challenge_id: Option, + pub challenge_type: ChallengeType, + #[serde(flatten)] + pub proposal_challenge_info: ProposalChallengeInfo, +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct ChallengeTemplate { + pub internal_id: i32, + pub id: String, + pub challenge_type: ChallengeType, + pub title: String, + pub description: String, + pub rewards_total: String, + pub proposers_rewards: String, + pub challenge_url: String, + pub fund_id: Option, + pub highlight: Option, +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct ReviewTemplate { + pub id: Option, + pub proposal_id: String, + pub assessor: String, + pub impact_alignment_rating_given: i32, + pub impact_alignment_note: String, + pub feasibility_rating_given: i32, + pub feasibility_note: String, + pub auditability_rating_given: i32, + pub auditability_note: String, + pub ranking: ReviewRanking, +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct ProposalChallengeInfoTemplate { + pub id: i32, +} + +pub trait 
ValidVotingTemplateGenerator { + fn next_proposal(&mut self) -> ProposalTemplate; + fn next_challenge(&mut self) -> ChallengeTemplate; + fn next_fund(&mut self) -> FundTemplate; + fn next_review(&mut self) -> ReviewTemplate; +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/mod.rs new file mode 100644 index 0000000000..ed36586048 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/data/mod.rs @@ -0,0 +1,46 @@ +use vit_servicing_station_lib_f10::db::models::{ + api_tokens::ApiTokenData, challenges::Challenge, funds::Fund, proposals::FullProposalInfo, + voteplans::Voteplan, +}; +mod csv_converter; +mod generator; + +pub use csv_converter::CsvConverter; +pub use generator::*; + +pub fn token() -> (String, ApiTokenData) { + ArbitrarySnapshotGenerator::default().token() +} + +pub fn token_hash() -> String { + token().0 +} + +pub fn proposals() -> Vec { + let mut gen = ArbitrarySnapshotGenerator::default(); + let funds = gen.funds(); + gen.proposals(&funds) +} + +pub fn funds() -> Vec { + ArbitrarySnapshotGenerator::default().funds() +} + +pub fn voteplans() -> Vec { + let mut gen = ArbitrarySnapshotGenerator::default(); + let funds = gen.funds(); + gen.voteplans(&funds) +} + +pub fn challenges() -> Vec { + let mut gen = ArbitrarySnapshotGenerator::default(); + let funds = gen.funds(); + gen.challenges(&funds) +} + +pub fn multivoteplan_snapshot() -> Snapshot { + let mut template = ArbitraryValidVotingTemplateGenerator::new(); + let mut generator = + ValidVotePlanGenerator::new(ArbitraryGenerator::default().valid_vote_plan_parameters()); + generator.build(&mut template) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/db/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/db/mod.rs new file mode 100644 index 0000000000..8e3cbec363 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/db/mod.rs @@ -0,0 +1,204 @@ +use diesel::expression_methods::ExpressionMethods; +use diesel::query_dsl::RunQueryDsl; +use diesel::{Insertable, SqliteConnection}; +use thiserror::Error; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::goals::InsertGoal; +use vit_servicing_station_lib_f10::db::schema::goals; +use vit_servicing_station_lib_f10::db::{ + models::{ + api_tokens::ApiTokenData, + challenges::Challenge, + funds::Fund, + proposals::{FullProposalInfo, ProposalChallengeInfo}, + }, + schema::{ + api_tokens, challenges, community_advisors_reviews, funds, + proposal_community_choice_challenge, proposal_simple_challenge, proposals, voteplans, + }, +}; + +pub struct DbInserter<'a> { + connection: &'a SqliteConnection, +} + +impl<'a> DbInserter<'a> { + pub fn new(connection: &'a SqliteConnection) -> Self { + Self { connection } + } + + pub fn insert_token(&self, token_data: &ApiTokenData) -> Result<(), DbInserterError> { + let values = ( + api_tokens::dsl::token.eq(token_data.token.as_ref()), + api_tokens::dsl::creation_time.eq(token_data.creation_time), + api_tokens::dsl::expire_time.eq(token_data.expire_time), + ); + + diesel::insert_into(api_tokens::table) + .values(values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + + Ok(()) + } + + pub fn insert_tokens(&self, tokens_data: &[ApiTokenData]) -> Result<(), DbInserterError> 
{ + for token_data in tokens_data { + self.insert_token(token_data)?; + } + Ok(()) + } + + pub fn insert_proposals(&self, proposals: &[FullProposalInfo]) -> Result<(), DbInserterError> { + for proposal in proposals { + let values = ( + proposals::id.eq(proposal.proposal.internal_id), + proposals::proposal_id.eq(proposal.proposal.proposal_id.clone()), + proposals::proposal_category.eq(proposal + .proposal + .proposal_category + .category_name + .clone()), + proposals::proposal_title.eq(proposal.proposal.proposal_title.clone()), + proposals::proposal_summary.eq(proposal.proposal.proposal_summary.clone()), + proposals::proposal_public_key.eq(proposal.proposal.proposal_public_key.clone()), + proposals::proposal_funds.eq(proposal.proposal.proposal_funds), + proposals::proposal_url.eq(proposal.proposal.proposal_url.clone()), + proposals::proposal_files_url.eq(proposal.proposal.proposal_files_url.clone()), + proposals::proposer_name.eq(proposal.proposal.proposer.proposer_name.clone()), + proposals::proposer_contact.eq(proposal.proposal.proposer.proposer_email.clone()), + proposals::proposer_url.eq(proposal.proposal.proposer.proposer_url.clone()), + proposals::proposal_impact_score.eq(proposal.proposal.proposal_impact_score), + proposals::proposer_relevant_experience.eq(proposal + .proposal + .proposer + .proposer_relevant_experience + .clone()), + proposals::chain_proposal_id.eq(proposal.proposal.chain_proposal_id.clone()), + proposals::chain_proposal_index.eq(proposal.proposal.chain_proposal_index), + proposals::chain_vote_options + .eq(proposal.proposal.chain_vote_options.as_csv_string()), + proposals::chain_voteplan_id.eq(proposal.proposal.chain_voteplan_id.clone()), + proposals::challenge_id.eq(proposal.proposal.challenge_id), + ); + diesel::insert_into(proposals::table) + .values(values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + + let voteplan_values = ( + voteplans::chain_voteplan_id.eq(proposal.proposal.chain_voteplan_id.clone()), + voteplans::chain_vote_start_time.eq(proposal.proposal.chain_vote_start_time), + voteplans::chain_vote_end_time.eq(proposal.proposal.chain_vote_end_time), + voteplans::chain_committee_end_time.eq(proposal.proposal.chain_committee_end_time), + voteplans::chain_voteplan_payload + .eq(proposal.proposal.chain_voteplan_payload.clone()), + voteplans::chain_vote_encryption_key + .eq(proposal.proposal.chain_vote_encryption_key.clone()), + voteplans::fund_id.eq(proposal.proposal.fund_id), + ); + + diesel::insert_or_ignore_into(voteplans::table) + .values(voteplan_values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + + match &proposal.challenge_info { + ProposalChallengeInfo::Simple(data) => { + let simple_values = ( + proposal_simple_challenge::proposal_id + .eq(proposal.proposal.proposal_id.clone()), + proposal_simple_challenge::proposal_solution + .eq(data.proposal_solution.clone()), + ); + diesel::insert_into(proposal_simple_challenge::table) + .values(simple_values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + ProposalChallengeInfo::CommunityChoice(data) => { + let community_values = ( + proposal_community_choice_challenge::proposal_id + .eq(proposal.proposal.proposal_id.clone()), + proposal_community_choice_challenge::proposal_brief + .eq(data.proposal_brief.clone()), + proposal_community_choice_challenge::proposal_importance + .eq(data.proposal_importance.clone()), + proposal_community_choice_challenge::proposal_goal + .eq(data.proposal_goal.clone()), + 
proposal_community_choice_challenge::proposal_metrics + .eq(data.proposal_metrics.clone()), + ); + diesel::insert_into(proposal_community_choice_challenge::table) + .values(community_values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + }; + } + Ok(()) + } + + pub fn insert_funds(&self, funds: &[Fund]) -> Result<(), DbInserterError> { + for fund in funds { + let values = fund.clone().values(); + + diesel::insert_into(funds::table) + .values(values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + + for voteplan in &fund.chain_vote_plans { + let values = ( + voteplans::id.eq(voteplan.id), + voteplans::chain_voteplan_id.eq(voteplan.chain_voteplan_id.clone()), + voteplans::chain_vote_start_time.eq(voteplan.chain_vote_start_time), + voteplans::chain_vote_end_time.eq(voteplan.chain_vote_end_time), + voteplans::chain_committee_end_time.eq(voteplan.chain_committee_end_time), + voteplans::chain_voteplan_payload.eq(voteplan.chain_voteplan_payload.clone()), + voteplans::chain_vote_encryption_key + .eq(voteplan.chain_vote_encryption_key.clone()), + voteplans::fund_id.eq(voteplan.fund_id), + ); + diesel::insert_or_ignore_into(voteplans::table) + .values(values) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + + for goal in &fund.goals { + diesel::insert_or_ignore_into(goals::table) + .values(InsertGoal::from(goal)) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + } + Ok(()) + } + + pub fn insert_challenges(&self, challenges: &[Challenge]) -> Result<(), DbInserterError> { + for challenge in challenges { + diesel::insert_or_ignore_into(challenges::table) + .values(challenge.clone().values()) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + Ok(()) + } + + pub fn insert_advisor_reviews(&self, reviews: &[AdvisorReview]) -> Result<(), DbInserterError> { + for review in reviews { + diesel::insert_or_ignore_into(community_advisors_reviews::table) + .values(review.clone().values()) + .execute(self.connection) + .map_err(DbInserterError::DieselError)?; + } + Ok(()) + } +} + +#[derive(Error, Debug)] +pub enum DbInserterError { + #[error("internal diesel error")] + DieselError(#[from] diesel::result::Error), +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/mod.rs new file mode 100644 index 0000000000..981cd0f4a3 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/mod.rs @@ -0,0 +1,43 @@ +mod rest; + +use crate::common::data::Snapshot as Data; +use rand::rngs::OsRng; +use rand::RngCore; +pub use rest::VitRestRequestGenerator; + +#[derive(Clone, Debug)] +struct SnapshotRandomizer { + snapshot: Data, + random: OsRng, +} + +impl SnapshotRandomizer { + pub fn new(snapshot: Data) -> Self { + Self { + snapshot, + random: OsRng, + } + } + + pub fn random_token(&mut self) -> String { + let tokens = self.snapshot.tokens(); + let random_idx = self.random_usize() % tokens.len(); + tokens.keys().nth(random_idx).cloned().unwrap() + } + + pub fn random_usize(&mut self) -> usize { + self.random.next_u32() as usize + } + + pub fn random_proposal_id(&mut self) -> i32 { + let proposals = self.snapshot.proposals(); + let random_idx = self.random_usize() % proposals.len(); + proposals.get(random_idx).unwrap().proposal.internal_id + } + + pub fn random_fund_id(&mut self) -> i32 { + let funds = self.snapshot.funds(); + let 
random_idx = self.random_usize() % funds.len();
+        funds.get(random_idx).unwrap().id
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/rest.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/rest.rs
new file mode 100644
index 0000000000..6a95ef85c8
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/load/rest.rs
@@ -0,0 +1,69 @@
+use crate::common::clients::RestClient;
+use crate::common::data::Snapshot as Data;
+use crate::common::load::SnapshotRandomizer;
+use jortestkit::load::{Request, RequestFailure, RequestGenerator};
+use std::time::Duration;
+
+#[derive(Clone, Debug)]
+pub struct VitRestRequestGenerator {
+    rest_client: RestClient,
+    snapshot_randomizer: SnapshotRandomizer,
+}
+
+impl VitRestRequestGenerator {
+    pub fn new(snapshot: Data, mut rest_client: RestClient) -> Self {
+        rest_client.disable_log();
+
+        Self {
+            snapshot_randomizer: SnapshotRandomizer::new(snapshot),
+            rest_client,
+        }
+    }
+}
+
+impl RequestGenerator for VitRestRequestGenerator {
+    fn next(&mut self) -> std::result::Result<Request, RequestFailure> {
+        self.rest_client
+            .set_api_token(self.snapshot_randomizer.random_token());
+
+        // Use modulus 4 so that every arm below, including the fund request, is reachable.
+        match self.snapshot_randomizer.random_usize() % 4 {
+            0 => self
+                .rest_client
+                .health()
+                .map(|_| Request {
+                    ids: vec![Option::None],
+                    duration: Duration::ZERO,
+                })
+                .map_err(|e| RequestFailure::General(format!("Health: {}", e))),
+            1 => self
+                .rest_client
+                .proposals()
+                .map(|_| Request {
+                    ids: vec![Option::None],
+                    duration: Duration::ZERO,
+                })
+                .map_err(|e| RequestFailure::General(format!("Proposals: {}", e))),
+            2 => self
+                .rest_client
+                .proposal(&self.snapshot_randomizer.random_proposal_id().to_string())
+                .map(|_| Request {
+                    ids: vec![Option::None],
+                    duration: Duration::ZERO,
+                })
+                .map_err(|e| RequestFailure::General(format!("Proposals by id: {}", e))),
+            3 => self
+                .rest_client
+                .fund(&self.snapshot_randomizer.random_fund_id().to_string())
+                .map(|_| Request {
+                    ids: vec![Option::None],
+                    duration: Duration::ZERO,
+                })
+                .map_err(|e| RequestFailure::General(format!("Funds by id: {}", e))),
+            _ => unreachable!(),
+        }
+    }
+
+    fn split(self) -> (Self, Option<Self>) {
+        todo!()
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/logger.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/logger.rs
new file mode 100644
index 0000000000..4c25d14aaf
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/logger.rs
@@ -0,0 +1,29 @@
+use std::{
+    fs::File,
+    io::{prelude::*, BufReader},
+    path::PathBuf,
+};
+
+pub struct Logger {
+    log_file: PathBuf,
+}
+
+impl Logger {
+    pub fn new(log_file: PathBuf) -> Self {
+        Self { log_file }
+    }
+
+    pub fn log_file(&self) -> &PathBuf {
+        &self.log_file
+    }
+
+    fn log_lines(&self) -> Vec<String> {
+        let file = File::open(self.log_file()).expect("logger file not found");
+        let buf = BufReader::new(file);
+        buf.lines().map(|l| l.unwrap()).collect()
+    }
+
+    pub fn any_error(&self) -> bool {
+        self.log_lines().iter().any(|x| x.contains("[ERROR]"))
+    }
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/mod.rs
new file mode 100644
index 0000000000..15663dfc15
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/mod.rs
@@ -0,0 +1,9 @@
+pub mod cli;
+pub mod clients;
+pub mod data;
+pub mod db;
+pub mod load; +pub mod logger; +pub mod paths; +pub mod server; +pub mod startup; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/paths.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/paths.rs new file mode 100644 index 0000000000..6e090589c6 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/paths.rs @@ -0,0 +1,2 @@ +pub const MIGRATION_DIR: &str = "../vit-servicing-station-lib/migrations"; +pub const BLOCK0_BIN: &str = "../resources/tests/block0.bin"; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/server.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/server.rs new file mode 100644 index 0000000000..775428128e --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/server.rs @@ -0,0 +1,54 @@ +use super::clients::RestClient; +use super::logger::Logger; +use std::path::PathBuf; +use std::process::Child; +use vit_servicing_station_lib_f10::server::settings::ServiceSettings; + +pub struct Server { + process: Child, + settings: ServiceSettings, + log_file: PathBuf, +} + +impl Server { + pub fn new(process: Child, settings: ServiceSettings, log_file: PathBuf) -> Self { + Self { + process, + settings, + log_file, + } + } + + pub fn rest_client(&self) -> RestClient { + RestClient::from(&self.settings) + } + + pub fn settings(&self) -> ServiceSettings { + self.settings.clone() + } + + pub fn rest_client_with_token(&self, token: &str) -> RestClient { + let mut rest_client = self.rest_client(); + rest_client.set_api_token(token.to_string()); + rest_client + } + + pub fn logger(&self) -> Logger { + Logger::new(self.log_file.clone()) + } + + pub fn is_token_valid(&self, token: &str) -> bool { + self.is_up(token) + } + + pub fn is_up(&self, token: &str) -> bool { + self.rest_client_with_token(token).health().is_ok() + } +} + +impl Drop for Server { + fn drop(&mut self) { + let _ = self.process.kill(); + self.process.wait().unwrap(); + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/db.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/db.rs new file mode 100644 index 0000000000..77710e60d0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/db.rs @@ -0,0 +1,186 @@ +use assert_fs::{fixture::PathChild, TempDir}; +use diesel::{connection::Connection, prelude::*}; +use std::io; +use std::path::Path; +use std::path::PathBuf; +use std::str::FromStr; +use thiserror::Error; +use vit_servicing_station_lib_f10::db::models::{ + api_tokens::ApiTokenData, challenges::Challenge, funds::Fund, +}; + +use crate::common::{ + data::Snapshot, + db::{DbInserter, DbInserterError}, + paths::MIGRATION_DIR, +}; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::AdvisorReview; +use vit_servicing_station_lib_f10::db::models::proposals::FullProposalInfo; + +const VIT_STATION_DB: &str = "vit_station.db"; + +pub struct DbBuilder { + migrations_folder: Option, + tokens: Option>, + proposals: Option>, + funds: Option>, + challenges: Option>, + advisor_reviews: Option>, +} + +impl DbBuilder { + pub fn new() -> Self { + Self { + migrations_folder: Some(PathBuf::from_str(MIGRATION_DIR).unwrap()), + tokens: None, + proposals: None, + funds: None, + challenges: None, + advisor_reviews: None, + } + } + + pub fn with_tokens(&mut self, tokens: Vec) -> &mut Self { + 
self.tokens = Some(tokens); + self + } + + pub fn with_token(&mut self, token: ApiTokenData) -> &mut Self { + self.with_tokens(vec![token]); + self + } + + pub fn with_proposals(&mut self, proposals: Vec) -> &mut Self { + self.proposals = Some(proposals); + self + } + + pub fn with_challenges(&mut self, challenges: Vec) -> &mut Self { + self.challenges = Some(challenges); + self + } + + pub fn with_snapshot(&mut self, snapshot: &Snapshot) -> &mut Self { + self.with_proposals(snapshot.proposals()); + self.with_tokens(snapshot.tokens().values().cloned().collect()); + self.with_funds(snapshot.funds()); + self.with_challenges(snapshot.challenges()); + self.with_advisor_reviews(snapshot.advisor_reviews()); + self + } + + pub fn with_funds(&mut self, funds: Vec) -> &mut Self { + self.funds = Some(funds); + self + } + + pub fn with_advisor_reviews(&mut self, reviews: Vec) -> &mut Self { + self.advisor_reviews = Some(reviews); + self + } + + pub fn disable_migrations(&mut self) -> &mut Self { + self.migrations_folder = None; + self + } + + pub fn with_migrations_from>(&mut self, migrations_folder: P) -> &mut Self { + self.migrations_folder = Some(migrations_folder.as_ref().into()); + self + } + + fn do_migration( + &self, + connection: &SqliteConnection, + migration_folder: &Path, + ) -> Result<(), DbBuilderError> { + let stdout = io::stdout(); + let mut handle = stdout.lock(); + diesel_migrations::run_pending_migrations_in_directory( + connection, + migration_folder, + &mut handle, + ) + .map_err(DbBuilderError::MigrationsError) + } + + fn try_do_migration(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(migrations_folder) = &self.migrations_folder { + self.do_migration(connection, migrations_folder)?; + } + Ok(()) + } + + fn try_insert_tokens(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(tokens) = &self.tokens { + DbInserter::new(connection).insert_tokens(tokens)?; + } + Ok(()) + } + + fn try_insert_funds(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(funds) = &self.funds { + DbInserter::new(connection).insert_funds(funds)?; + } + Ok(()) + } + + fn try_insert_proposals(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(proposals) = &self.proposals { + DbInserter::new(connection).insert_proposals(proposals)?; + } + Ok(()) + } + + fn try_insert_challenges(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(challenges) = &self.challenges { + DbInserter::new(connection).insert_challenges(challenges)?; + } + + Ok(()) + } + + fn try_insert_reviews(&self, connection: &SqliteConnection) -> Result<(), DbBuilderError> { + if let Some(reviews) = &self.advisor_reviews { + DbInserter::new(connection).insert_advisor_reviews(reviews)?; + } + Ok(()) + } + + pub fn build(&self, temp_dir: &TempDir) -> Result { + self.build_into_path(temp_dir.child(VIT_STATION_DB).path()) + } + + pub fn build_into_path>(&self, path: P) -> Result { + let path = path.as_ref(); + let db_path = path.to_str().ok_or(DbBuilderError::CannotExtractTempPath)?; + println!("Building db in {:?}...", db_path); + + let connection = SqliteConnection::establish(db_path)?; + self.try_do_migration(&connection)?; + self.try_insert_tokens(&connection)?; + self.try_insert_funds(&connection)?; + self.try_insert_proposals(&connection)?; + self.try_insert_challenges(&connection)?; + self.try_insert_reviews(&connection)?; + Ok(path.to_path_buf()) + } +} + +impl Default for DbBuilder { 
+ fn default() -> Self { + Self::new() + } +} + +#[derive(Error, Debug)] +pub enum DbBuilderError { + #[error("cannot insert data")] + DbInserterError(#[from] DbInserterError), + #[error("Cannot open or create database")] + CannotCreateDatabase(#[from] diesel::ConnectionError), + #[error("Cannot initialize on temp directory")] + CannotExtractTempPath, + #[error("migration errors")] + MigrationsError(#[from] diesel::migration::RunMigrationsError), +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/mod.rs new file mode 100644 index 0000000000..2f7b540a93 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/mod.rs @@ -0,0 +1,93 @@ +pub mod db; +pub mod server; + +use assert_fs::TempDir; +use lazy_static::lazy_static; +use rand::Rng; +use std::env; +use std::path::PathBuf; +use std::sync::atomic::{AtomicU32, Ordering}; + +use self::{db::DbBuilder, server::ServerBootstrapper}; +use crate::common::data::ArbitrarySnapshotGenerator; +use crate::common::{data, server::Server}; +use data::Snapshot; +use server::ServerBootstrapperError; + +pub fn get_exe() -> PathBuf { + const VIT_BIN_NAME: &str = env!("VIT_BIN_NAME"); + let mut path = get_working_directory(); + path.push(VIT_BIN_NAME); + if cfg!(windows) { + path.set_extension("exe"); + } + assert!( + path.is_file(), + "File does not exist: {:?}, pwd: {:?}", + path, + env::current_dir() + ); + path +} + +pub fn get_cli_exe() -> PathBuf { + const VIT_BIN_NAME: &str = env!("VIT_CLI_NAME"); + let mut path = get_working_directory(); + path.push(VIT_BIN_NAME); + if cfg!(windows) { + path.set_extension("exe"); + } + assert!( + path.is_file(), + "File does not exist: {:?}, pwd: {:?}", + path, + env::current_dir() + ); + path +} + +/// Gets working directory +/// Uses std::env::current_exe() for this purpose. +/// Current exe directory is ./target/{profile}/deps/{app_name}.exe +/// Function returns ./target/{profile} +fn get_working_directory() -> PathBuf { + let mut output_directory: PathBuf = std::env::current_exe().unwrap(); + + output_directory.pop(); + + if output_directory.ends_with("deps") { + output_directory.pop(); + } + output_directory +} + +lazy_static! 
{ + static ref NEXT_AVAILABLE_PORT_NUMBER: AtomicU32 = { + let initial_port = rand::thread_rng().gen_range(6000, 10999); + AtomicU32::new(initial_port) + }; +} + +pub fn get_available_port() -> u32 { + NEXT_AVAILABLE_PORT_NUMBER.fetch_add(1, Ordering::SeqCst) +} + +pub fn quick_start(temp_dir: &TempDir) -> Result<(Server, Snapshot), ServerBootstrapperError> { + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(temp_dir)?; + + if !server.is_up(&snapshot.token_hash()) { + return Err(ServerBootstrapperError::FailToBootstrap); + } + + Ok((server, snapshot)) +} + +pub fn empty_db(temp_dir: &TempDir) -> PathBuf { + DbBuilder::new().build(temp_dir).unwrap() +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/bootstrap.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/bootstrap.rs new file mode 100644 index 0000000000..81d5d8103d --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/bootstrap.rs @@ -0,0 +1,123 @@ +use super::{BootstrapCommandBuilder, ServerSettingsBuilder}; +use crate::common::{ + paths::BLOCK0_BIN, + server::Server, + startup::{db::DbBuilderError, get_exe}, +}; +use assert_fs::fixture::PathChild; +use assert_fs::TempDir; +use std::path::PathBuf; +use std::process::Stdio; +use thiserror::Error; +use vit_servicing_station_lib_f10::server::settings::LogLevel; + +pub struct ServerBootstrapper { + settings_builder: ServerSettingsBuilder, + allowed_origins: Option, + service_version: String, +} + +impl ServerBootstrapper { + pub fn new() -> Self { + let mut settings_builder: ServerSettingsBuilder = Default::default(); + settings_builder + .with_random_localhost_address() + .with_block0_path(BLOCK0_BIN.to_string()); + + Self { + settings_builder, + allowed_origins: None, + service_version: Default::default(), + } + } + + pub fn with_localhost_address(&mut self, port: u32) -> &mut Self { + self.settings_builder.with_localhost_address(port); + self + } + + pub fn with_log_level(&mut self, log_level: LogLevel) -> &mut Self { + self.settings_builder.with_log_level(log_level); + self + } + + pub fn with_db_path>(&mut self, db_url: S) -> &mut Self { + self.settings_builder.with_db_path(db_url.into()); + self + } + + pub fn with_block0_path>(&mut self, block0_path: S) -> &mut Self { + self.settings_builder.with_block0_path(block0_path.into()); + self + } + + pub fn with_allowed_origins>(&mut self, allowed_origins: S) -> &mut Self { + self.allowed_origins = Some(allowed_origins.into()); + self + } + + pub fn with_api_tokens(&mut self, enabled: bool) -> &mut Self { + self.settings_builder.with_api_tokens(enabled); + self + } + + pub fn with_service_version(&mut self, service_version: String) -> &mut Self { + self.service_version = service_version; + self + } + + pub fn start_with_exe( + &self, + temp_dir: &TempDir, + exe: PathBuf, + ) -> Result { + let settings = self.settings_builder.build(); + let logger_file: PathBuf = temp_dir.child("log.log").path().into(); + let mut command_builder = BootstrapCommandBuilder::new(exe); + + command_builder + .address(&settings.address.to_string()) + .db_url(&settings.db_url) + .log_file(&logger_file) + .enable_api_tokens(settings.enable_api_tokens) + .block0_path(&settings.block0_path) + .service_version(&self.service_version); 
+ + if let Some(allowed_origins) = self.allowed_origins.as_ref() { + command_builder.allowed_origins(allowed_origins); + } + + if let Some(log_level) = &settings.log.log_level { + command_builder.log_level(&serde_json::to_string(&log_level).unwrap()); + } + + let mut command = command_builder.build(); + println!("{:?}", command); + let child = command.stdout(Stdio::inherit()).spawn()?; + + std::thread::sleep(std::time::Duration::from_secs(1)); + Ok(Server::new(child, settings, logger_file)) + } + + pub fn start(&self, temp_dir: &TempDir) -> Result { + self.start_with_exe(temp_dir, get_exe()) + } +} + +impl Default for ServerBootstrapper { + fn default() -> Self { + Self::new() + } +} + +#[derive(Debug, Error)] +pub enum ServerBootstrapperError { + #[error("cannot spawn process")] + ProcessSpawnError(#[from] std::io::Error), + #[error("cannot find binary (0)")] + CargoError(#[from] assert_cmd::cargo::CargoError), + #[error("failed to bootstrap")] + FailToBootstrap, + #[error("database builder error")] + DbBuilderError(#[from] DbBuilderError), +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/command.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/command.rs new file mode 100644 index 0000000000..1442a6a559 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/command.rs @@ -0,0 +1,181 @@ +use crate::common::startup::get_exe; +use std::path::Path; +use std::{path::PathBuf, process::Command}; + +/// In order to test robustness of server bootstrapper we need to be able +/// to provide some +pub struct BootstrapCommandBuilder { + exe: PathBuf, + address: Option, + allowed_origins: Option, + block0_path: Option, + cert_file: Option, + db_url: Option, + enable_api_tokens: bool, + in_settings_file: Option, + max_age_secs: Option, + out_settings_file: Option, + priv_key_file: Option, + log_file: Option, + log_level: Option, + service_version: Option, +} + +impl Default for BootstrapCommandBuilder { + fn default() -> Self { + Self::new(get_exe()) + } +} + +impl BootstrapCommandBuilder { + pub fn new(exe: PathBuf) -> Self { + Self { + exe, + address: None, + allowed_origins: None, + block0_path: None, + cert_file: None, + db_url: None, + enable_api_tokens: false, + in_settings_file: None, + max_age_secs: None, + out_settings_file: None, + priv_key_file: None, + log_file: None, + log_level: None, + service_version: None, + } + } + + pub fn address>(&mut self, address: S) -> &mut Self { + self.address = Some(address.into()); + self + } + + pub fn allowed_origins>(&mut self, allowed_origins: S) -> &mut Self { + self.allowed_origins = Some(allowed_origins.into()); + self + } + + pub fn block0_path>(&mut self, block0_path: S) -> &mut Self { + self.block0_path = Some(block0_path.into()); + self + } + + pub fn cert_file(&mut self, cert_file: &Path) -> &mut Self { + self.cert_file = Some(cert_file.to_path_buf()); + self + } + + pub fn db_url>(&mut self, db_url: S) -> &mut Self { + self.db_url = Some(db_url.into()); + self + } + + pub fn enable_api_tokens(&mut self, enabled: bool) -> &mut Self { + self.enable_api_tokens = enabled; + self + } + + pub fn in_settings_file(&mut self, in_settings_file: &Path) -> &mut Self { + self.in_settings_file = Some(in_settings_file.to_path_buf()); + self + } + pub fn max_age_secs(&mut self, max_age_secs: u32) -> &mut Self { + self.max_age_secs = Some(max_age_secs); + self + } + pub fn out_settings_file(&mut self, 
out_settings_file: &Path) -> &mut Self { + self.out_settings_file = Some(out_settings_file.to_path_buf()); + self + } + + pub fn priv_key_file(&mut self, priv_key_file: &Path) -> &mut Self { + self.priv_key_file = Some(priv_key_file.to_path_buf()); + self + } + + pub fn log_file(&mut self, log_file: &Path) -> &mut Self { + self.log_file = Some(log_file.to_path_buf()); + self + } + + pub fn log_level(&mut self, log_level: &str) -> &mut Self { + self.log_level = Some(log_level.to_string()); + self + } + + pub fn service_version>(&mut self, service_version: S) -> &mut Self { + self.service_version = Some(service_version.into()); + self + } + + pub fn build(&self) -> Command { + let mut command = Command::new(self.exe.clone()); + + let service_version = if let Some(service_version) = &self.service_version { + service_version.clone() + } else { + Default::default() + }; + command.arg("--service-version").arg(service_version); + + if let Some(address) = &self.address { + command.arg("--address").arg(address); + } + + if let Some(allowed_origins) = &self.allowed_origins { + command.arg("--allowed-origins").arg(allowed_origins); + } + + if let Some(block0_path) = &self.block0_path { + command.arg("--block0-path").arg(block0_path); + } + + if let Some(cert_file) = &self.cert_file { + command.arg("--cert-file").arg(cert_file.to_str().unwrap()); + } + + if let Some(db_url) = &self.db_url { + command.arg("--db-url").arg(db_url); + } + + if let Some(in_settings_file) = &self.in_settings_file { + command + .arg("--in-settings-file") + .arg(in_settings_file.to_str().unwrap()); + } + + if let Some(max_age_secs) = &self.max_age_secs { + command.arg("--max-age-secs").arg(max_age_secs.to_string()); + } + + if let Some(out_settings_file) = &self.out_settings_file { + command + .arg("--out-settings-file") + .arg(out_settings_file.to_str().unwrap()); + } + + if let Some(priv_key_file) = &self.priv_key_file { + command + .arg("--priv-key-file") + .arg(priv_key_file.to_str().unwrap()); + } + + if self.enable_api_tokens { + command.arg("--enable-api-tokens"); + } + + if let Some(log_file) = &self.log_file { + command + .arg("--log-output-path") + .arg(log_file.to_str().unwrap()); + } + + if let Some(log_level) = &self.log_level { + command.arg("--log-level").arg(log_level); + } + + command + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/mod.rs new file mode 100644 index 0000000000..ca818101f0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/mod.rs @@ -0,0 +1,7 @@ +mod bootstrap; +mod command; +mod settings; + +pub use bootstrap::{ServerBootstrapper, ServerBootstrapperError}; +pub use command::BootstrapCommandBuilder; +pub use settings::{dump_settings, load_settings, ServerSettingsBuilder}; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/settings.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/settings.rs new file mode 100644 index 0000000000..d2ef8097b2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/common/startup/server/settings.rs @@ -0,0 +1,77 @@ +use crate::common::startup::get_available_port; +use assert_fs::{fixture::PathChild, TempDir}; +use std::{ + net::SocketAddr, + path::{Path, PathBuf}, +}; +use vit_servicing_station_lib_f10::server::settings::{ + 
+    dump_settings_to_file, load_settings_from_file, Cors, LogLevel, ServiceSettings,
+};
+
+#[derive(Default)]
+pub struct ServerSettingsBuilder {
+    settings: ServiceSettings,
+    cors: Option<Cors>,
+}
+
+impl ServerSettingsBuilder {
+    pub fn with_random_localhost_address(&mut self) -> &mut Self {
+        self.with_localhost_address(get_available_port());
+        self
+    }
+
+    pub fn with_localhost_address(&mut self, port: u32) -> &mut Self {
+        self.settings.address = Self::format_localhost_address(port);
+        self
+    }
+
+    fn format_localhost_address(port: u32) -> SocketAddr {
+        format!("127.0.0.1:{}", port).parse().unwrap()
+    }
+
+    pub fn with_db_path<S: Into<String>>(&mut self, db_url: S) -> &mut Self {
+        self.settings.db_url = db_url.into();
+        self
+    }
+
+    pub fn with_block0_path<S: Into<String>>(&mut self, block0_path: S) -> &mut Self {
+        self.settings.block0_path = block0_path.into();
+        self
+    }
+
+    pub fn with_cors(&mut self, cors: Cors) -> &mut Self {
+        self.cors = Some(cors);
+        self
+    }
+
+    pub fn with_api_tokens(&mut self, enabled: bool) -> &mut Self {
+        self.settings.enable_api_tokens = enabled;
+        self
+    }
+
+    pub fn with_log_output_path(&mut self, path: PathBuf) -> &mut Self {
+        self.settings.log.log_output_path = Some(path);
+        self
+    }
+
+    pub fn with_log_level(&mut self, log_level: LogLevel) -> &mut Self {
+        self.settings.log.log_level = Some(log_level);
+        self
+    }
+
+    pub fn build(&self) -> ServiceSettings {
+        self.settings.clone()
+    }
+}
+
+///todo: add error mapping
+pub fn dump_settings(temp_dir: &TempDir, settings: &ServiceSettings) -> PathBuf {
+    let child_path = temp_dir.child("settings.json");
+    dump_settings_to_file(child_path.path().to_str().unwrap(), settings).unwrap();
+    child_path.path().into()
+}
+
+///todo: add error mapping
+pub fn load_settings<P: AsRef<Path>>(path: P) -> ServiceSettings {
+    load_settings_from_file(path.as_ref().to_str().unwrap()).unwrap()
+}
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/lib.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/lib.rs
new file mode 100644
index 0000000000..8bd5ba3819
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/lib.rs
@@ -0,0 +1,7 @@
+pub mod common;
+
+#[cfg(test)]
+pub mod tests;
+
+#[cfg(test)]
+extern crate lazy_static;
diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/address.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/address.rs
new file mode 100644
index 0000000000..5d072877bf
--- /dev/null
+++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/address.rs
@@ -0,0 +1,43 @@
+use crate::common::startup::{quick_start, server::BootstrapCommandBuilder};
+use assert_cmd::assert::OutputAssertExt;
+use assert_fs::TempDir;
+
+#[test]
+pub fn address_with_schema() {
+    let mut command_builder: BootstrapCommandBuilder = Default::default();
+    command_builder
+        .address("http://127.0.0.1:8080")
+        .build()
+        .assert()
+        .failure()
+        .code(1);
+}
+
+#[test]
+pub fn address_with_domain() {
+    let mut command_builder: BootstrapCommandBuilder = Default::default();
+    command_builder
+        .address("http://localhost:8080")
+        .build()
+        .assert()
+        .failure()
+        .code(1);
+}
+
+#[test]
+pub fn port_already_in_use() -> Result<(), Box<dyn std::error::Error>> {
+    let temp_dir = TempDir::new().unwrap();
+    let (server, _) = quick_start(&temp_dir)?;
+
+    let settings = server.settings();
+    let mut command_builder: BootstrapCommandBuilder = Default::default();
+    command_builder
+
.address(settings.address.to_string()) + .db_url(settings.db_url) + .block0_path(settings.block0_path) + .build() + .assert() + .failure() + .code(101); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/block0.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/block0.rs new file mode 100644 index 0000000000..e3dc4b1290 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/block0.rs @@ -0,0 +1,50 @@ +use crate::common::{ + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; + +#[test] +pub fn non_existing_block0_file() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = data::ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir)?; + + assert!(server.is_up(&snapshot.any_token().0)); + Ok(()) +} + +#[test] +pub fn malformed_path() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = data::ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_block0_path("C:/tmp/a:/block0.bin") + .start(&temp_dir)?; + + assert!(server.is_up(&snapshot.any_token().0)); + Ok(()) +} + +#[test] +#[cfg(not(windows))] +pub fn network_path() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = data::ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_block0_path("//tmp/block0.bin") + .start(&temp_dir)?; + + assert!(server.is_up(&snapshot.any_token().0)); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/db_url.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/db_url.rs new file mode 100644 index 0000000000..f28d6254c1 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/db_url.rs @@ -0,0 +1,25 @@ +use crate::common::startup::server::BootstrapCommandBuilder; +use assert_cmd::assert::OutputAssertExt; +use vit_servicing_station_lib_f10::server::exit_codes::ApplicationExitCode; + +#[test] +pub fn malformed_path() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .db_url("C:/tmp/a:/databse.db") + .build() + .assert() + .failure() + .code(ApplicationExitCode::DbConnectionError as i32); +} + +#[test] +pub fn path_doesnt_exist() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .db_url("C:/foo.db") + .build() + .assert() + .failure() + .code(ApplicationExitCode::DbConnectionError as i32); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/in_settings.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/in_settings.rs new file mode 100644 index 0000000000..eb970f5a81 --- /dev/null +++ 
b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/in_settings.rs @@ -0,0 +1,110 @@ +use crate::common::{ + data, + paths::BLOCK0_BIN, + startup::{ + db::DbBuilder, + empty_db, + server::{dump_settings, BootstrapCommandBuilder, ServerSettingsBuilder}, + }, +}; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::TempDir; +use std::{ + fs::{self, File}, + io::Read, + path::{Path, PathBuf}, + str::FromStr, +}; +use vit_servicing_station_lib_f10::server::exit_codes::ApplicationExitCode; + +#[test] +pub fn no_in_settings_provided() { + let command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .build() + .assert() + .failure() + .code(ApplicationExitCode::DbConnectionError as i32); +} + +#[test] +pub fn in_settings_file_does_not_exist() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + + let non_existing_file = PathBuf::from_str("settings.json").unwrap(); + + command_builder + .in_settings_file(&non_existing_file) + .build() + .assert() + .failure() + .code(ApplicationExitCode::LoadSettingsError as i32); +} + +#[test] +pub fn in_settings_file_malformed() { + let temp_dir = TempDir::new().unwrap(); + + let mut settings_builder: ServerSettingsBuilder = Default::default(); + let settings = settings_builder + .with_random_localhost_address() + .with_db_path(empty_db(&temp_dir).to_str().unwrap()) + .with_block0_path(BLOCK0_BIN) + .build(); + + let settings_file = dump_settings(&temp_dir, &settings); + remove_first_char_in_file(&settings_file); + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .in_settings_file(&settings_file) + .build() + .assert() + .failure() + .code(ApplicationExitCode::LoadSettingsError as i32); +} + +pub fn remove_first_char_in_file(settings_file: &Path) { + let mut file = File::open(settings_file).unwrap(); + let mut contents = String::new(); + file.read_to_string(&mut contents).unwrap(); + drop(file); + contents.remove(0); + fs::write(settings_file, contents).expect("Unable to write file"); +} + +#[test] +pub fn in_settings_file_with_malformed_path() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + + let non_existing_file = PathBuf::from_str("/tmp/a/c:/settings.json").unwrap(); + + command_builder + .in_settings_file(&non_existing_file) + .build() + .assert() + .failure() + .code(ApplicationExitCode::LoadSettingsError as i32); +} + +#[test] +pub fn db_url_and_block0_replaced() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + + let (_, token) = data::token(); + + let db_path = DbBuilder::new() + .with_token(token) + .with_proposals(data::proposals()) + .build(&temp_dir)?; + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .block0_path(db_path.to_str().unwrap()) + .db_url(BLOCK0_BIN) + .build() + .assert() + .failure() + .code(ApplicationExitCode::DbConnectionError as i32); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/logger.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/logger.rs new file mode 100644 index 0000000000..bfc21f9b17 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/logger.rs @@ -0,0 +1,64 @@ +use crate::common::startup::server::BootstrapCommandBuilder; +use assert_cmd::assert::OutputAssertExt; + +#[cfg(windows)] +use crate::common::{ + 
paths::BLOCK0_BIN, + startup::{ + empty_db, + server::{dump_settings, ServerSettingsBuilder}, + }, +}; +#[cfg(windows)] +use assert_fs::TempDir; +#[cfg(windows)] +use std::path::PathBuf; +#[cfg(windows)] +use vit_servicing_station_lib_f10::server::exit_codes::ApplicationExitCode; + +#[test] +pub fn wrong_log_level_provided() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .log_level("traceble") + .build() + .assert() + .failure() + .code(1); +} + +#[test] +#[cfg(windows)] +pub fn malformed_logger_path_provided() { + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .log_file(&PathBuf::from("c:\\a\\d:\\logger")) + .build() + .assert() + .failure() + .code(ApplicationExitCode::LoadSettingsError as i32); +} + +#[test] +#[cfg(windows)] +pub fn in_settings_file_malformed_log_output_path() { + let temp_dir = TempDir::new().unwrap(); + + let mut settings_builder: ServerSettingsBuilder = Default::default(); + let settings = settings_builder + .with_random_localhost_address() + .with_db_path(empty_db(&temp_dir).to_str().unwrap()) + .with_block0_path(BLOCK0_BIN) + .with_log_output_path(PathBuf::from("c:\\a\\d:\\logger")) + .build(); + + let settings_file = dump_settings(&temp_dir, &settings); + let mut command_builder: BootstrapCommandBuilder = Default::default(); + + command_builder + .in_settings_file(&settings_file) + .build() + .assert() + .failure() + .code(ApplicationExitCode::LoadSettingsError as i32); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/mod.rs new file mode 100644 index 0000000000..e9121d61a9 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/mod.rs @@ -0,0 +1,6 @@ +pub mod address; +pub mod block0; +pub mod db_url; +pub mod in_settings; +pub mod logger; +pub mod out_settings; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/out_settings.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/out_settings.rs new file mode 100644 index 0000000000..2fd74c97f0 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/arguments/out_settings.rs @@ -0,0 +1,80 @@ +use crate::common::{ + paths::BLOCK0_BIN, + startup::{ + empty_db, + server::{dump_settings, load_settings, BootstrapCommandBuilder, ServerSettingsBuilder}, + }, +}; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::{fixture::PathChild, TempDir}; +use std::path::PathBuf; +use vit_servicing_station_lib_f10::server::settings::LogLevel; +use vit_servicing_station_lib_f10::server::settings::ServiceSettings; + +#[test] +pub fn out_settings_provided() { + let temp_dir = TempDir::new().unwrap(); + + let (in_settings_file, settings) = example_settings_file(&temp_dir); + let out_settings_file = temp_dir.child("out_settings.json"); + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .in_settings_file(&in_settings_file) + .out_settings_file(out_settings_file.path()) + .build() + .assert() + .success(); + + let actual_settings = load_settings(&out_settings_file.path()); + assert_eq!(settings, actual_settings); +} + +#[test] +pub fn out_settings_file_override() { + let temp_dir = TempDir::new().unwrap(); + let mut command_builder: 
BootstrapCommandBuilder = Default::default(); + + let (in_settings_file, _) = example_settings_file(&temp_dir); + + command_builder + .in_settings_file(&in_settings_file) + .out_settings_file(&in_settings_file) + .build() + .assert() + .success(); +} + +#[test] +#[ignore = "issue in comment for https://github.com/input-output-hk/vit-servicing-station/pull/75"] +pub fn out_settings_file_from_cmdline() { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let (_, settings) = example_settings_file(&temp_dir); + let out_settings_file = temp_dir.child("settings.json"); + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .db_url(&settings.db_url) + .block0_path(&settings.block0_path) + .address(settings.address.to_string()) + .out_settings_file(out_settings_file.path()) + .build() + .assert() + .success(); + + let actual_settings = load_settings(&out_settings_file.path()); + assert_eq!(settings, actual_settings); +} + +fn example_settings_file(temp_dir: &TempDir) -> (PathBuf, ServiceSettings) { + let mut settings_builder: ServerSettingsBuilder = Default::default(); + let settings = settings_builder + .with_random_localhost_address() + .with_db_path(empty_db(temp_dir).to_str().unwrap()) + .with_block0_path(BLOCK0_BIN) + .with_log_level(LogLevel::Info) + .with_log_output_path(temp_dir.child("logger.log").path().into()) + .build(); + let settings_file = dump_settings(temp_dir, &settings); + (settings_file, settings) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/mod.rs new file mode 100644 index 0000000000..deeac03611 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/bootstrap/mod.rs @@ -0,0 +1,22 @@ +use crate::common::{ + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; + +pub mod arguments; + +#[test] +pub fn bootstrap_with_random_data() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let snapshot = data::ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir)?; + + std::thread::sleep(std::time::Duration::from_secs(1)); + assert!(server.is_up(&snapshot.any_token().0)); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/db.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/db.rs new file mode 100644 index 0000000000..cf19d33b95 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/db.rs @@ -0,0 +1,29 @@ +use crate::common::cli::VitCliCommand; +use crate::common::startup::server::ServerBootstrapper; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::assert::PathAssert; +use assert_fs::{fixture::PathChild, TempDir}; +use jortestkit::prelude::file_exists_and_not_empty; +#[test] +pub fn genereate_empty_db() { + let temp_dir = TempDir::new().unwrap(); + let db_file = temp_dir.child("db.sqlite"); + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .db() + .init() + .db_url(db_file.path()) + .build() + .assert() + .success(); + + db_file.assert(file_exists_and_not_empty()); + + let server = ServerBootstrapper::new() + .with_db_path(db_file.path().to_str().unwrap()) + .start(&temp_dir) 
+ .unwrap(); + + std::thread::sleep(std::time::Duration::from_secs(1)); + assert!(server.rest_client().health().is_ok()); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/load.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/load.rs new file mode 100644 index 0000000000..8a75c8cadd --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/load.rs @@ -0,0 +1,97 @@ +use crate::common::data::{multivoteplan_snapshot, ArbitrarySnapshotGenerator}; +use crate::common::{ + cli::VitCliCommand, + data::CsvConverter, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::{fixture::PathChild, TempDir}; + +#[test] +pub fn load_data_test() { + let temp_dir = TempDir::new().unwrap(); + let db_file = temp_dir.child("db.sqlite"); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + + let csv_converter = CsvConverter; + + let funds = temp_dir.child("funds.csv"); + csv_converter.funds(snapshot.funds(), funds.path()).unwrap(); + + let proposals = temp_dir.child("proposals.csv"); + csv_converter + .proposals( + snapshot.proposals().iter().take(1).cloned().collect(), + proposals.path(), + ) + .unwrap(); + + let voteplans = temp_dir.child("voteplans.csv"); + csv_converter + .voteplans( + snapshot.voteplans().iter().take(1).cloned().collect(), + voteplans.path(), + ) + .unwrap(); + + let challenges = temp_dir.child("challenges.csv"); + csv_converter + .challenges( + snapshot.challenges().iter().take(1).cloned().collect(), + challenges.path(), + ) + .unwrap(); + + let reviews = temp_dir.child("reviews.csv"); + csv_converter + .advisor_reviews(snapshot.advisor_reviews(), reviews.path()) + .unwrap(); + + let goals = temp_dir.child("goals.csv"); + csv_converter + .goals( + snapshot.goals().iter().map(From::from).collect(), + goals.path(), + ) + .unwrap(); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .db() + .init() + .db_url(db_file.path()) + .build() + .assert() + .success(); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .csv_data() + .load() + .db_url(db_file.path()) + .funds(funds.path()) + .proposals(proposals.path()) + .voteplans(voteplans.path()) + .challenges(challenges.path()) + .advisor_reviews(reviews.path()) + .goals(goals.path()) + .build() + .assert() + .success(); + + let server = ServerBootstrapper::new() + .with_db_path(db_file.path().to_str().unwrap()) + .start(&temp_dir) + .unwrap(); + + std::thread::sleep(std::time::Duration::from_secs(1)); + assert!(server.rest_client().health().is_ok()); +} + +#[test] +pub fn voting_snapshot_build() { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let mut db_builder = DbBuilder::new(); + db_builder.with_snapshot(&multivoteplan_snapshot()); + db_builder.build(&temp_dir).unwrap(); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/mod.rs new file mode 100644 index 0000000000..4c82364f95 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/mod.rs @@ -0,0 +1,3 @@ +pub mod db; +pub mod load; +pub mod token; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/token.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/token.rs new file mode 100644 index 0000000000..a429e4e5a6 --- /dev/null +++ 
b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/cli/token.rs @@ -0,0 +1,116 @@ +use crate::common::{ + cli::{VitCli, VitCliCommand}, + clients::RawRestClient, + startup::quick_start, +}; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::{fixture::PathChild, TempDir}; +use hyper::StatusCode; +use jortestkit::process::output_extensions::ProcessOutput; +use std::error::Error; + +#[test] +pub fn generate_token() { + let vit_cli: VitCliCommand = Default::default(); + let output = vit_cli + .api_token() + .generate() + .n(2) + .build() + .assert() + .success() + .get_output() + .as_multi_line(); + + assert_eq!(2, output.len()); + + for line in output { + assert_eq!(14, line.len()) + } +} + +#[test] +pub fn generate_token_for_given_size_and_n() { + let vit_cli: VitCliCommand = Default::default(); + let output = vit_cli + .api_token() + .generate() + .n(3) + .size(15) + .build() + .assert() + .success() + .get_output() + .as_multi_line(); + + assert_eq!(3, output.len()); + + for line in output { + assert_eq!(20, line.len()) + } +} + +#[test] +pub fn add_generated_token_to_db() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let (server, _snapshot) = quick_start(&temp_dir).unwrap(); + + let vit_cli: VitCli = Default::default(); + let tokens = vit_cli.generate_tokens(1); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .api_token() + .add() + .db_url(server.settings().db_url) + .tokens(&tokens) + .build() + .assert() + .success(); + + let first_token = tokens.get(0).unwrap(); + let raw: RawRestClient = server.rest_client_with_token(first_token).into(); + + assert_eq!(raw.health()?.status(), StatusCode::OK); + Ok(()) +} + +#[test] +pub fn add_generated_token_to_db_negative() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let (server, _snapshot) = quick_start(&temp_dir).unwrap(); + + let vit_cli: VitCli = Default::default(); + let tokens = vit_cli.generate_tokens(1); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .api_token() + .add() + .db_url(temp_dir.child("fake.db").path().to_str().unwrap()) + .tokens(&tokens) + .build() + .assert() + .failure(); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .api_token() + .add() + .db_url(server.settings().db_url) + .tokens_as_str("some_random_token") + .build() + .assert() + .failure(); + + let vit_cli: VitCliCommand = Default::default(); + vit_cli + .api_token() + .add() + .db_url(server.settings().db_url) + .tokens_as_str("randomtoken1;randomtoken2") + .build() + .assert() + .failure(); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/data/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/data/mod.rs new file mode 100644 index 0000000000..102ad04ce2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/data/mod.rs @@ -0,0 +1,36 @@ +use crate::common::data::{ + ArbitraryValidVotingTemplateGenerator, CurrentFund, ValidVotePlanGenerator, + ValidVotePlanParameters, +}; +use crate::common::startup::{db::DbBuilder, server::ServerBootstrapper}; +use assert_fs::TempDir; +use chain_impl_mockchain::testing::scenario::template::VotePlanDef; +use chain_impl_mockchain::testing::VoteTestGen; + +#[test] +pub fn bootstrap_with_valid_data() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let vote_plan = ValidVotePlanParameters::from(CurrentFund::from_single( + VotePlanDef::from_vote_plan( + 
"test", + Some("owner"), + &VoteTestGen::vote_plan_with_proposals(30), + ), + Default::default(), + )); + let snapshot = ValidVotePlanGenerator::new(vote_plan) + .build(&mut ArbitraryValidVotingTemplateGenerator::new()); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir)?; + std::thread::sleep(std::time::Duration::from_secs(1)); + assert!(server.is_up(&snapshot.any_token().0)); + + let rest_client = server.rest_client_with_token(&snapshot.token_hash()); + assert!(rest_client.proposals().is_ok()); + assert!(rest_client.challenges().is_ok()); + assert!(rest_client.funds().is_ok()); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/mod.rs new file mode 100644 index 0000000000..b86078b4dc --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/mod.rs @@ -0,0 +1,6 @@ +pub mod bootstrap; +pub mod cli; +pub mod data; +#[cfg(feature = "non-functional")] +pub mod non_functional; +pub mod rest; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/mod.rs new file mode 100644 index 0000000000..3f1e667d2e --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/mod.rs @@ -0,0 +1,16 @@ +use jortestkit::load::{Id, RequestStatusProvider, Status}; +use std::time::Duration; + +mod short; +#[cfg(feature = "soak")] +mod soak; + +struct MockStatusProvider; + +impl RequestStatusProvider for MockStatusProvider { + fn get_statuses(&self, ids: &[Id]) -> Vec { + ids.iter() + .map(|id| Status::new_success(Duration::from_millis(10), id.clone())) + .collect() + } +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/short.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/short.rs new file mode 100644 index 0000000000..8565e290e8 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/short.rs @@ -0,0 +1,85 @@ +use super::MockStatusProvider; +use crate::common::{ + load::{VitRestRequestGenerator, VotingPowerRequestGenerator}, + snapshot::{SnapshotBuilder, SnapshotUpdater}, + startup::quick_start, +}; +use assert_fs::TempDir; +use jortestkit::load::{self, ConfigurationBuilder, Monitor}; +use std::time::Duration; + +#[test] +pub fn update_snapshot_during_the_load_quick() { + let temp_dir = TempDir::new().unwrap(); + let (server, data) = quick_start(&temp_dir).unwrap(); + let rest_client = server.rest_client_with_token(&data.token_hash()); + + let snapshot = SnapshotBuilder::default() + .with_entries_count(10_000) + .build(); + + rest_client.put_snapshot(&snapshot).unwrap(); + + let request = VotingPowerRequestGenerator::new(snapshot.clone(), rest_client.clone()); + let config = ConfigurationBuilder::duration(Duration::from_secs(40)) + .thread_no(10) + .step_delay(Duration::from_millis(500)) + .monitor(Monitor::Progress(100)) + .build(); + + let load_run = load::start_background_async( + request, + MockStatusProvider, + config, + "Vit station snapshot service rest", + ); + + std::thread::sleep(std::time::Duration::from_secs(10)); + + let new_snapshot = SnapshotUpdater::from(snapshot) + 
.update_voting_power() + .add_new_arbitrary_voters() + .build(); + + rest_client.put_snapshot(&new_snapshot).unwrap(); + + let stats = load_run.wait_for_finish(); + assert!((stats.calculate_passrate() as u32) > 95); +} + +#[test] +pub fn rest_snapshot_load_quick() { + let temp_dir = TempDir::new().unwrap(); + let (server, data) = quick_start(&temp_dir).unwrap(); + let rest_client = server.rest_client_with_token(&data.token_hash()); + + let snapshot = SnapshotBuilder::default() + .with_entries_count(10_000) + .build(); + + let request = VotingPowerRequestGenerator::new(snapshot, rest_client); + let config = ConfigurationBuilder::duration(Duration::from_secs(40)) + .thread_no(10) + .step_delay(Duration::from_millis(500)) + .monitor(Monitor::Progress(100)) + .build(); + let stats = load::start_sync(request, config, "Vit station snapshot service rest"); + assert!((stats.calculate_passrate() as u32) > 95); +} + +#[test] +pub fn rest_load_quick() { + let temp_dir = TempDir::new().unwrap(); + let (server, snapshot) = quick_start(&temp_dir).unwrap(); + + let rest_client = server.rest_client(); + + let request = VitRestRequestGenerator::new(snapshot, rest_client); + let config = ConfigurationBuilder::duration(Duration::from_secs(40)) + .thread_no(10) + .step_delay(Duration::from_millis(500)) + .monitor(Monitor::Progress(100)) + .build(); + let stats = load::start_sync(request, config, "Vit station service rest"); + assert!((stats.calculate_passrate() as u32) > 95); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/soak.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/soak.rs new file mode 100644 index 0000000000..06003dddc2 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/non_functional/soak.rs @@ -0,0 +1,47 @@ +use crate::common::{ + load::{VitRestRequestGenerator, VotingPowerRequestGenerator}, + snapshot::SnapshotBuilder, + startup::quick_start, +}; +use assert_fs::TempDir; +use jortestkit::load::{self, ConfigurationBuilder, Monitor}; +use std::time::Duration; + +#[test] +#[cfg(feature = "soak")] +pub fn rest_snapshot_load_long() { + let temp_dir = TempDir::new().unwrap(); + let (server, data) = quick_start(&temp_dir).unwrap(); + let rest_client = server.rest_client_with_token(&data.token_hash()); + + let snapshot = SnapshotBuilder::default() + .with_entries_count(10_000) + .build(); + + let request = VotingPowerRequestGenerator::new(snapshot, rest_client); + let config = ConfigurationBuilder::duration(Duration::from_secs(18_000)) + .thread_no(3) + .step_delay(Duration::from_secs(1)) + .monitor(Monitor::Progress(10_000)) + .build(); + let stats = load::start_sync(request, config, "Vit station snapshot service rest"); + assert!((stats.calculate_passrate() as u32) > 95); +} + +#[test] +#[cfg(feature = "soak")] +pub fn rest_load_long() { + let temp_dir = TempDir::new().unwrap(); + let (server, snapshot) = quick_start(&temp_dir).unwrap(); + + let rest_client = server.rest_client(); + + let request = VitRestRequestGenerator::new(snapshot, rest_client); + let config = ConfigurationBuilder::duration(Duration::from_secs(18_000)) + .thread_no(3) + .step_delay(Duration::from_secs(1)) + .monitor(Monitor::Progress(10_000)) + .build(); + let stats = load::start_sync(request, config, "Vit station service rest"); + assert!((stats.calculate_passrate() as u32) > 95); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/advisor_reviews.rs 
b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/advisor_reviews.rs new file mode 100644 index 0000000000..c04c965997 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/advisor_reviews.rs @@ -0,0 +1,66 @@ +use crate::common::{ + clients::RawRestClient, + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; +use reqwest::StatusCode; +use vit_servicing_station_lib_f10::db::models::community_advisors_reviews::{ + AdvisorReview, ReviewRanking, +}; + +#[test] +pub fn get_advisor_reviews() -> Result<(), Box> { + use pretty_assertions::assert_eq; + let temp_dir = TempDir::new().unwrap().into_persistent(); + let proposal_id = 1234; + let expected_review = AdvisorReview { + id: 1, + proposal_id, + assessor: "za_foo_bar".to_string(), + impact_alignment_rating_given: 0, + impact_alignment_note: "impact note".to_string(), + feasibility_rating_given: 0, + feasibility_note: "feasibility note".to_string(), + auditability_rating_given: 0, + auditability_note: "auditability note".to_string(), + ranking: ReviewRanking::Good, + }; + let (hash, token) = data::token(); + + let db_path = DbBuilder::new() + .with_token(token) + .with_advisor_reviews(vec![expected_review.clone()]) + .build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir)?; + + let rest_client = server.rest_client_with_token(&hash); + + let actual_review = rest_client.advisor_reviews(&expected_review.proposal_id.to_string())?; + assert_eq!( + expected_review, + actual_review.get(&expected_review.assessor).unwrap()[0] + ); + + // non existing + let empty_reviews = rest_client.advisor_reviews("0")?; + assert!(empty_reviews.is_empty()); + + let rest_client: RawRestClient = server.rest_client_with_token(&hash).into(); + + // malformed index + assert_eq!( + rest_client.advisor_reviews("a")?.status(), + StatusCode::NOT_FOUND + ); + // overflow index + assert_eq!( + rest_client.fund("3147483647999")?.status(), + StatusCode::NOT_FOUND + ); + + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/cors.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/cors.rs new file mode 100644 index 0000000000..91c2cf7df7 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/cors.rs @@ -0,0 +1,153 @@ +use crate::common::{ + clients::RawRestClient, + data::ArbitrarySnapshotGenerator, + startup::{ + db::DbBuilder, + server::{BootstrapCommandBuilder, ServerBootstrapper}, + }, +}; +use assert_cmd::assert::OutputAssertExt; +use assert_fs::TempDir; + +#[test] +pub fn cors_illegal_domain() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_allowed_origins("http://domain.com") + .start(&temp_dir)?; + + let mut rest_client = server.rest_client_with_token(&snapshot.token_hash()); + rest_client.set_origin("http://other_domain.com"); + + assert_request_failed_due_to_cors(&rest_client.into())?; + Ok(()) +} + +fn assert_request_failed_due_to_cors( + rest_client: &RawRestClient, +) -> Result<(), Box> { + assert_eq!( + rest_client.funds()?.text()?, + "CORS request forbidden: origin not allowed" + ); + Ok(()) +} + +#[test] +pub fn 
cors_malformed_domain_no_http() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .db_url(db_path.to_str().unwrap()) + .allowed_origins("domain.com") + .build() + .assert() + .failure(); + Ok(()) +} + +#[test] +pub fn cors_ip_versus_domain() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_allowed_origins("http://127.0.0.1") + .start(&temp_dir)?; + + let mut rest_client = server.rest_client_with_token(&snapshot.token_hash()); + rest_client.set_origin("http://localhost"); + + assert_request_failed_due_to_cors(&rest_client.into())?; + + Ok(()) +} + +#[test] +pub fn cors_wrong_delimiter() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let mut command_builder: BootstrapCommandBuilder = Default::default(); + command_builder + .db_url(db_path.to_str().unwrap()) + .allowed_origins("http://domain.com,http://other_domain.com") + .build() + .assert() + .failure(); + Ok(()) +} + +#[test] +pub fn cors_single_domain() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_allowed_origins("http://domain.com") + .start(&temp_dir)?; + + let mut rest_client: RawRestClient = + server.rest_client_with_token(&snapshot.token_hash()).into(); + rest_client.set_origin("http://domain.com"); + + assert!(rest_client.funds()?.status().is_success()); + + Ok(()) +} + +#[test] +pub fn cors_https() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_allowed_origins("https://domain.com") + .start(&temp_dir)?; + + let mut rest_client: RawRestClient = + server.rest_client_with_token(&snapshot.token_hash()).into(); + rest_client.set_origin("https://domain.com"); + + assert!(rest_client.funds()?.status().is_success()); + + Ok(()) +} + +#[test] +pub fn cors_multi_domain() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let snapshot = ArbitrarySnapshotGenerator::default().snapshot(); + let db_path = DbBuilder::new().with_snapshot(&snapshot).build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_allowed_origins("http://domain.com;http://other_domain.com") + .start(&temp_dir)?; + + let mut rest_client: RawRestClient = + server.rest_client_with_token(&snapshot.token_hash()).into(); + rest_client.set_origin("http://other_domain.com"); + assert!(rest_client.funds()?.status().is_success()); + + rest_client.set_origin("http://domain.com"); + assert!(rest_client.funds()?.status().is_success()); + + 
assert!(!server.logger().any_error()); + + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/funds.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/funds.rs new file mode 100644 index 0000000000..fcc8aeedd9 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/funds.rs @@ -0,0 +1,55 @@ +use crate::common::{ + clients::RawRestClient, + data, + startup::{db::DbBuilder, quick_start, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; +use reqwest::StatusCode; + +#[test] +pub fn get_funds_list_is_not_empty() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let (server, snapshot) = quick_start(&temp_dir)?; + server + .rest_client_with_token(&snapshot.token_hash()) + .funds() + .expect("cannot get funds"); + Ok(()) +} + +#[test] +pub fn get_funds_by_id() -> Result<(), Box> { + use pretty_assertions::assert_eq; + let temp_dir = TempDir::new().unwrap().into_persistent(); + let mut expected_fund = data::funds().first().unwrap().clone(); + let (hash, token) = data::token(); + + let db_path = DbBuilder::new() + .with_token(token) + .with_funds(vec![expected_fund.clone()]) + .with_challenges(expected_fund.challenges.clone()) + .build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir)?; + + let rest_client = server.rest_client_with_token(&hash); + + let actual_fund = rest_client.fund(&expected_fund.id.to_string())?; + expected_fund.challenges.sort_by_key(|c| c.internal_id); + assert_eq!(expected_fund, actual_fund); + + let rest_client: RawRestClient = server.rest_client_with_token(&hash).into(); + // non existing + assert_eq!(rest_client.fund("2")?.status(), StatusCode::NOT_FOUND); + // malformed index + assert_eq!(rest_client.fund("a")?.status(), StatusCode::NOT_FOUND); + // overflow index + assert_eq!( + rest_client.fund("3147483647")?.status(), + StatusCode::NOT_FOUND + ); + + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/genesis.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/genesis.rs new file mode 100644 index 0000000000..440f8b67b4 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/genesis.rs @@ -0,0 +1,51 @@ +use crate::common::{ + clients::RawRestClient, + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; + +use crate::common::paths::BLOCK0_BIN; +use hyper::StatusCode; + +#[test] +pub fn genesis_deserialize_bijection() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let (hash, token) = data::token(); + + let db_path = DbBuilder::new().with_token(token).build(&temp_dir).unwrap(); + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_block0_path(BLOCK0_BIN) + .start(&temp_dir) + .unwrap(); + + let expected = std::fs::read(BLOCK0_BIN).unwrap(); + + let genesis_as_bytes = server + .rest_client_with_token(&hash) + .genesis() + .expect("cannot get genesis block bytes"); + + assert_eq!(expected, genesis_as_bytes); + Ok(()) +} + +#[test] +pub fn non_existing_block0() { + let temp_dir = TempDir::new().unwrap(); + let (hash, token) = data::token(); + + let db_path = DbBuilder::new().with_token(token).build(&temp_dir).unwrap(); + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_block0_path("non/existing/path") + 
.start(&temp_dir) + .unwrap(); + + let rest_raw: RawRestClient = server.rest_client_with_token(&hash).into(); + + assert_eq!(rest_raw.genesis().unwrap().status(), StatusCode::NO_CONTENT); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/mod.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/mod.rs new file mode 100644 index 0000000000..34446ee82b --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/mod.rs @@ -0,0 +1,8 @@ +pub mod advisor_reviews; +pub mod cors; +pub mod funds; +pub mod genesis; +pub mod proposals; +mod service_version; +pub mod token; +pub mod voteplan_id; diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/proposals.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/proposals.rs new file mode 100644 index 0000000000..0f1650ad41 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/proposals.rs @@ -0,0 +1,59 @@ +use crate::common::{ + clients::RawRestClient, + data, + startup::{db::DbBuilder, quick_start, server::ServerBootstrapper}, +}; + +use assert_fs::TempDir; +use reqwest::StatusCode; + +#[test] +pub fn get_proposals_list_is_not_empty() { + let temp_dir = TempDir::new().unwrap(); + let (server, snapshot) = quick_start(&temp_dir).unwrap(); + let proposals = server + .rest_client_with_token(&snapshot.token_hash()) + .proposals() + .expect("cannot get proposals"); + assert!(!proposals.is_empty()); +} + +#[test] +pub fn get_proposal_by_id() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let mut expected_proposal = data::proposals().first().unwrap().clone(); + let mut expected_challenge = data::challenges().first().unwrap().clone(); + expected_proposal.proposal.challenge_id = expected_challenge.id; + expected_challenge.challenge_type = expected_proposal.challenge_type.clone(); + + let (hash, token) = data::token(); + + let db_path = DbBuilder::new() + .with_token(token) + .with_proposals(vec![expected_proposal.clone()]) + .with_challenges(vec![expected_challenge]) + .build(&temp_dir)?; + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir) + .unwrap(); + + let rest_client = server.rest_client_with_token(&hash); + + let actual_proposal = + rest_client.proposal(&expected_proposal.proposal.internal_id.to_string())?; + assert_eq!(actual_proposal, expected_proposal.proposal); + let rest_client: RawRestClient = rest_client.into(); + // non existing + assert_eq!(rest_client.proposal("2")?.status(), StatusCode::NOT_FOUND); + // malformed index + assert_eq!(rest_client.proposal("a")?.status(), StatusCode::NOT_FOUND); + // overflow index + assert_eq!( + rest_client.proposal("3147483647")?.status(), + StatusCode::NOT_FOUND + ); + + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/service_version.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/service_version.rs new file mode 100644 index 0000000000..6104827613 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/service_version.rs @@ -0,0 +1,29 @@ +use crate::common::{ + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; + +#[test] +pub fn service_version() { + let temp_dir = TempDir::new().unwrap(); + let (hash, token) = data::token(); + + let db_path = 
DbBuilder::new().with_token(token).build(&temp_dir).unwrap(); + let version = "TestV1".to_string(); + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_block0_path("non/existing/path") + .with_service_version(version.clone()) + .start(&temp_dir) + .unwrap(); + + assert_eq!( + server + .rest_client_with_token(&hash) + .service_version() + .unwrap() + .service_version, + version + ); +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/token.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/token.rs new file mode 100644 index 0000000000..472183cc7c --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/token.rs @@ -0,0 +1,38 @@ +use crate::common::{ + clients::RawRestClient, + data, + startup::{db::DbBuilder, server::ServerBootstrapper}, +}; +use assert_fs::TempDir; +use reqwest::StatusCode; + +#[test] +pub fn token_validation() -> Result<(), Box> { + let temp_dir = TempDir::new().unwrap(); + let (hash, token) = data::token(); + + let db_path = DbBuilder::new().with_token(token).build(&temp_dir).unwrap(); + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .with_api_tokens(true) + .start(&temp_dir) + .unwrap(); + + let invalid_token = data::token_hash(); + + let rest_client: RawRestClient = server.rest_client_with_token(&hash).into(); + assert_eq!(rest_client.health()?.status(), StatusCode::OK); + + let rest_client: RawRestClient = server.rest_client_with_token(&invalid_token).into(); + assert_eq!(rest_client.health()?.status(), StatusCode::UNAUTHORIZED); + assert_eq!(rest_client.fund("1")?.status(), StatusCode::UNAUTHORIZED); + assert_eq!(rest_client.funds()?.status(), StatusCode::UNAUTHORIZED); + assert_eq!( + rest_client.proposal("1")?.status(), + StatusCode::UNAUTHORIZED + ); + assert_eq!(rest_client.proposals()?.status(), StatusCode::UNAUTHORIZED); + assert_eq!(rest_client.genesis()?.status(), StatusCode::UNAUTHORIZED); + Ok(()) +} diff --git a/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/voteplan_id.rs b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/voteplan_id.rs new file mode 100644 index 0000000000..704ad9b119 --- /dev/null +++ b/src/vit-servicing-station-f10/vit-servicing-station-tests-f10/src/tests/rest/voteplan_id.rs @@ -0,0 +1,50 @@ +use crate::common::data::multivoteplan_snapshot; +use crate::common::startup::{db::DbBuilder, server::ServerBootstrapper}; +use assert_fs::TempDir; +use vit_servicing_station_lib_f10::db::models::proposals::Proposal; +use vit_servicing_station_lib_f10::v0::endpoints::proposals::ProposalVoteplanIdAndIndexes; +#[test] +pub fn get_proposals_by_voteplan_id_and_index() { + let temp_dir = TempDir::new().unwrap().into_persistent(); + let snapshot = multivoteplan_snapshot(); + + let db_path = DbBuilder::new() + .with_snapshot(&snapshot) + .build(&temp_dir) + .unwrap(); + + let server = ServerBootstrapper::new() + .with_db_path(db_path.to_str().unwrap()) + .start(&temp_dir) + .unwrap(); + + let rest_client = server.rest_client_with_token(&snapshot.token_hash()); + let mut expected_proposals = rest_client.proposals().unwrap(); + + let vote_plan_id: String = snapshot.voteplans()[0].chain_voteplan_id.clone(); + let indexes: Vec = expected_proposals + .iter() + .filter(|x| x.chain_voteplan_id == vote_plan_id) + .map(|p| p.chain_proposal_index) + .collect(); + + expected_proposals = expected_proposals + 
.into_iter() + .filter(|x| x.chain_voteplan_id == vote_plan_id) + .filter(|x| indexes.contains(&x.chain_proposal_index)) + .collect(); + + let mut actual_proposals: Vec = rest_client + .proposals_by_voteplan_id_and_index(&[ProposalVoteplanIdAndIndexes { + vote_plan_id, + indexes, + }]) + .unwrap() + .into_iter() + .map(|proposal| proposal.proposal) + .collect(); + + expected_proposals.sort_by(|a, b| a.internal_id.cmp(&b.internal_id)); + actual_proposals.sort_by(|a, b| a.internal_id.cmp(&b.internal_id)); + assert_eq!(actual_proposals, expected_proposals); +} diff --git a/src/vit-testing/valgrind/src/bin/valgrind.rs b/src/vit-testing/valgrind/src/bin/valgrind.rs index edc149ed85..0574a30e43 100644 --- a/src/vit-testing/valgrind/src/bin/valgrind.rs +++ b/src/vit-testing/valgrind/src/bin/valgrind.rs @@ -75,7 +75,7 @@ async fn main() { let block0_content = server_stub.block0(); - let block0 = warp::path!("block0").map(move || Ok(block0_content.clone())); + let block0 = warp::path!("block0").map(move || block0_content.clone()); root.and( proposals diff --git a/src/vit-testing/vitup/src/mode/mock/rest/mod.rs b/src/vit-testing/vitup/src/mode/mock/rest/mod.rs index f979f8d372..c5ed6f8376 100644 --- a/src/vit-testing/vitup/src/mode/mock/rest/mod.rs +++ b/src/vit-testing/vitup/src/mode/mock/rest/mod.rs @@ -225,7 +225,9 @@ pub async fn start_rest_server(context: ContextLock) -> Result<(), Error> { let span = span!(Level::INFO, "rest api call received"); let _enter = span.enter(); tracing::info!("get block0"); - Ok(context.read().unwrap().block0_bin()) + //unwrapping is ok because this is a test module + let context_result = context.read().unwrap(); + context_result.block0_bin() }) .with(warp::reply::with::headers(default_headers())); diff --git a/tests/Earthfile b/tests/Earthfile new file mode 100644 index 0000000000..6648477e76 --- /dev/null +++ b/tests/Earthfile @@ -0,0 +1,27 @@ +VERSION 0.7 + +FROM debian:stable-slim + +# BASH, CURL, GPG, NODEJS, NPM, JRE +dependencies: + RUN apt-get update + RUN apt-get install bash + RUN apt-get install curl -y + RUN apt-get install gpg -y + RUN curl -fsSL https://deb.nodesource.com/gpgkey/nodesource-repo.gpg.key | gpg --dearmor -o /etc/apt/keyrings/nodesource.gpg + RUN apt-get install nodejs -y + RUN apt-get install npm -y + RUN apt-get install default-jre -y + +generate-client: + FROM +dependencies + + # TODO(@nicopado): Get the correct file from openapi doc generation target - https://github.com/input-output-hk/catalyst-core/issues/589 + ARG openapispec_file="./petstore.yaml" + COPY $openapispec_file . + + RUN npm install @openapitools/openapi-generator-cli -g + + RUN openapi-generator-cli validate -i $openapispec_file + + RUN openapi-generator-cli generate -i $openapispec_file -g rust -o ./tmp/client/ --package-name cat-data-service-client \ No newline at end of file diff --git a/utilities/fragment-exporter/Earthfile b/utilities/fragment-exporter/Earthfile index 817b77b6d1..ce53e47a1c 100644 --- a/utilities/fragment-exporter/Earthfile +++ b/utilities/fragment-exporter/Earthfile @@ -1,10 +1,8 @@ # Set the Earthly version to 0.7 VERSION 0.7 -# Use current debian stable with python -FROM python:3.11-slim-bookworm - -poetry: +deps: + FROM python:3.11-slim-bookworm WORKDIR /work ENV POETRY_HOME=/tmp/poetry @@ -21,19 +19,16 @@ poetry: RUN poetry install --only main --no-root src: - FROM +poetry + FROM +deps - COPY --dir fragment_exporter README.md . + COPY --dir fragment_exporter tests README.md . 
check: FROM +src - COPY --dir tests tests - RUN poetry install --only dev RUN poetry run black --check . RUN poetry run ruff check . - RUN poetry run pytest -v build: FROM +check @@ -44,9 +39,14 @@ build: SAVE ARTIFACT dist SAVE ARTIFACT requirements.txt -docker: +test: + FROM +build + + RUN poetry run pytest -v + +publish: + FROM python:3.11-slim-bookworm ARG tag="latest" - ARG registry WORKDIR /app @@ -59,5 +59,4 @@ docker: RUN pip3 install --no-cache *.whl ENTRYPOINT ["/app/entry.sh"] - - SAVE IMAGE --push ${registry}fragment-exporter:$tag \ No newline at end of file + SAVE IMAGE fragment-exporter:$tag \ No newline at end of file diff --git a/utilities/ideascale-importer/Earthfile b/utilities/ideascale-importer/Earthfile index 8aab8956f8..4b80819eec 100644 --- a/utilities/ideascale-importer/Earthfile +++ b/utilities/ideascale-importer/Earthfile @@ -73,12 +73,13 @@ docker: SAVE IMAGE --push ${registry}ideascale-importer:$tag # Run tests -test: - FROM +build - - RUN --no-cache \ - --secret IDEASCALE_EMAIL \ - --secret IDEASCALE_PASSWORD \ - --secret IDEASCALE_API_TOKEN \ - IDEASCALE_API_URL="https://temp-cardano-sandbox.ideascale.com" \ - poetry run pytest +# TODO: Enable this when CI supports secrets +# test: +# FROM +build + +# RUN --no-cache \ +# --secret IDEASCALE_EMAIL \ +# --secret IDEASCALE_PASSWORD \ +# --secret IDEASCALE_API_TOKEN \ +# IDEASCALE_API_URL="https://temp-cardano-sandbox.ideascale.com" \ +# poetry run pytest diff --git a/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py b/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py index 6ca7978bde..f7c8087ab7 100644 --- a/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py +++ b/utilities/ideascale-importer/ideascale_importer/cli/snapshot.py @@ -86,7 +86,12 @@ async def inner(): and ssh_snapshot_tool_path is not None and ssh_snapshot_tool_output_dir is not None ): - ssh_config = SSHConfig(ssh_keyfile, ssh_destination, ssh_snapshot_tool_path, ssh_snapshot_tool_output_dir) + ssh_config = SSHConfig( + keyfile_path=ssh_keyfile, + destination=ssh_destination, + snapshot_tool_path=ssh_snapshot_tool_path, + snapshot_tool_output_dir=ssh_snapshot_tool_output_dir, + ) else: if snapshot_tool_ssh: logger.error( diff --git a/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py b/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py index 0dd5fde8ab..7f89a8ecd1 100644 --- a/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py +++ b/utilities/ideascale-importer/ideascale_importer/snapshot_importer.py @@ -266,6 +266,7 @@ class SnapshotReport(BaseModel): registrations_count: int = 0 registered_voting_power: int = 0 unregistered_voting_power: int = 0 + eligible_voters_count: int = 0 processed_voting_power: int = 0 cip_15_registration_count: int = 0 cip_36_single_registration_count: int = 0 @@ -435,6 +436,7 @@ async def _run_snapshot_tool(self): snapshot_tool_cmd = ( "ssh" f" -i {self.ssh_config.keyfile_path}" + " -oTCPKeepAlive=no -oServerAliveInterval=20" f" {self.ssh_config.destination}" f" {self.ssh_config.snapshot_tool_path}" f" --db-user {db_user}" @@ -702,6 +704,7 @@ async def _write_db_data(self): for ctd in network_processed_snapshot: for snapshot_contribution in ctd.contributions: network_report.processed_voting_power += snapshot_contribution.value + network_report.eligible_voters_count += 1 voting_key = ctd.hir.voting_key # This can be removed once it's fixed in catalyst-toolbox @@ -746,6 +749,7 @@ async def _write_db_data(self): 
total_cip_36_multi_registrations=network_report.cip_36_multi_registration_count, total_registered_voting_power=network_report.registered_voting_power, total_unregistered_voting_power=network_report.unregistered_voting_power, + total_eligible_voters=network_report.eligible_voters_count, total_processed_voting_power=network_report.processed_voting_power, total_rewards_payable=network_report.rewards_payable, total_rewards_unpayable=network_report.rewards_unpayable,