diff --git a/.github/pr-comment-templates/pr-change-analysis.template.md b/.github/pr-comment-templates/pr-change-analysis.template.md new file mode 100644 index 00000000..223cf68a --- /dev/null +++ b/.github/pr-comment-templates/pr-change-analysis.template.md @@ -0,0 +1,23 @@ +# Changes Analysis + +**Commit SHA:** {{.after_sha}} +**Comparing To SHA:** {{.before_sha}} + +## API Changes + +### Summary +{{.api_changes_summary}} + +### Report +The full API changes report is available at: {{.api_changes_report_url}} + +## API Coverage +{{with .api_coverage}} + +| | Before | After | Δ | +|--------------:|-----------------------------------------------------|---------------------------------------------------|---------------------------------------------------| +| Covered (%) | {{.before.covered}} ({{.before.covered_pct}} %) | {{.after.covered}} ({{.after.covered_pct}} %) | {{.covered_delta}} ({{.covered_pct_delta}} %) | +| Uncovered (%) | {{.before.uncovered}} ({{.before.uncovered_pct}} %) | {{.after.uncovered}} ({{.after.uncovered_pct}} %) | {{.uncovered_delta}} ({{.uncovered_pct_delta}} %) | +| Unknown | {{.before.specified_but_not_provided}} | {{.after.specified_but_not_provided}} | {{.specified_but_not_provided_delta}} | + +{{end}} \ No newline at end of file diff --git a/.github/workflows/add-untriaged.yml b/.github/workflows/add-untriaged.yml index 9dcc7020..752f5c73 100644 --- a/.github/workflows/add-untriaged.yml +++ b/.github/workflows/add-untriaged.yml @@ -1,19 +1,20 @@ -name: Apply 'untriaged' label during issue lifecycle - -on: - issues: - types: [opened, reopened, transferred] - -jobs: - apply-label: - runs-on: ubuntu-latest - steps: - - uses: actions/github-script@v6 - with: - script: | - github.rest.issues.addLabels({ - issue_number: context.issue.number, - owner: context.repo.owner, - repo: context.repo.repo, - labels: ['untriaged'] - }) +name: Apply 'untriaged' label during issue lifecycle + +on: + issues: + types: [opened, reopened, transferred] + +jobs: + apply-label: + runs-on: ubuntu-latest + steps: + - name: Add `untriaged` Label + uses: actions/github-script@v6 + with: + script: | + github.rest.issues.addLabels({ + issue_number: context.issue.number, + owner: context.repo.owner, + repo: context.repo.repo, + labels: ['untriaged'] + }) diff --git a/.github/workflows/analyze-pr-changes.yml b/.github/workflows/analyze-pr-changes.yml new file mode 100644 index 00000000..ee2cc06e --- /dev/null +++ b/.github/workflows/analyze-pr-changes.yml @@ -0,0 +1,219 @@ +name: Analyze PR Changes + +on: [pull_request] + +jobs: + analyze: + runs-on: ubuntu-latest + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Setup Environment Variables + shell: bash -eo pipefail {0} + run: | + BEFORE_SHA=$(git merge-base "$BASE_SHA" "$HEAD_SHA") + AFTER_SHA=${HEAD_SHA} + CLUSTER_SPEC=/tmp/opensearch-openapi-CLUSTER.yaml + BEFORE_SPEC=/tmp/opensearch-openapi-${BEFORE_SHA}.yaml + AFTER_SPEC=/tmp/opensearch-openapi-${AFTER_SHA}.yaml + BEFORE_COVERAGE=/tmp/coverage-api-${BEFORE_SHA}.json + AFTER_COVERAGE=/tmp/coverage-api-${AFTER_SHA}.json + COVERAGE_DIFF=/tmp/coverage-api-${BEFORE_SHA}-${AFTER_SHA}-DIFF.json + + vars=( + BEFORE_SHA + AFTER_SHA + CLUSTER_SPEC + BEFORE_SPEC + AFTER_SPEC + BEFORE_COVERAGE + AFTER_COVERAGE + COVERAGE_DIFF + ) + + { + for var in "${vars[@]}" + do + echo "${var}=${!var}" + done + } | tee "$GITHUB_ENV" + env: + HEAD_SHA: ${{ github.event.pull_request.head.sha }} + BASE_SHA: ${{ github.event.pull_request.base.sha }} + + - name: Dump 
OpenSearch Cluster's API + shell: bash -eo pipefail {0} + run: | + docker build coverage --tag opensearch-with-api-plugin + + docker run \ + --name opensearch \ + --rm -d \ + -p 9200:9200 -p 9600:9600 \ + -e "discovery.type=single-node" \ + -e OPENSEARCH_INITIAL_ADMIN_PASSWORD="$OPENSEARCH_PASSWORD" \ + opensearch-with-api-plugin + + npm install + + npm run dump-cluster-spec -- --insecure --output $CLUSTER_SPEC + + docker stop opensearch + env: + OPENSEARCH_PASSWORD: BobgG7YrtsdKf9M + + - name: Checkout BEFORE Spec + shell: bash -eo pipefail {0} + run: git checkout $BEFORE_SHA + + - name: Build BEFORE Spec + shell: bash -eo pipefail {0} + run: | + npm install + npm run merge -- --source ./spec --output $BEFORE_SPEC + + - name: Checkout AFTER Spec + shell: bash -eo pipefail {0} + run: git checkout $AFTER_SHA + + - name: Build AFTER Spec + shell: bash -eo pipefail {0} + run: | + npm install + npm run merge -- --source ./spec --output $AFTER_SPEC + + - name: Calculate Coverage + shell: bash -eo pipefail {0} + run: | + npm run coverage:spec -- \ + --cluster $CLUSTER_SPEC \ + --specification $BEFORE_SPEC \ + --output $BEFORE_COVERAGE + + npm run coverage:spec -- \ + --cluster $CLUSTER_SPEC \ + --specification $AFTER_SPEC \ + --output $AFTER_COVERAGE + + jq . $AFTER_COVERAGE + + jq --slurp ' + [ .[].counts ] + | { + "before": (.[0]), + "after": (.[1]), + "covered_delta": (.[1].covered - .[0].covered), + "covered_pct_delta": ((.[1].covered_pct - .[0].covered_pct) * 100 | round / 100), + "uncovered_delta": (.[1].uncovered - .[0].uncovered), + "uncovered_pct_delta": ((.[1].uncovered_pct - .[0].uncovered_pct) * 100 | round / 100), + "specified_but_not_provided_delta": (.[1].specified_but_not_provided - .[0].specified_but_not_provided) + } + ' \ + $BEFORE_COVERAGE \ + $AFTER_COVERAGE \ + | tee $COVERAGE_DIFF + + - name: Upload Coverage Data + id: upload-coverage + uses: actions/upload-artifact@v4 + with: + name: coverage-api + path: | + /tmp/coverage-api-*.json + + - name: Install openapi-changes + shell: bash -eo pipefail {0} + run: npm install --global @pb33f/openapi-changes + + - name: Generate API Changes HTML Report + shell: bash -eo pipefail {0} + run: openapi-changes html-report --no-logo --no-color $BEFORE_SPEC $AFTER_SPEC + + - name: Upload API Changes HTML Report + id: upload-api-changes-report + uses: actions/upload-artifact@v4 + with: + name: api-changes-report + path: | + report.html + /tmp/opensearch-openapi-*.yaml + + - name: Generate API Changes Summary + shell: bash -eo pipefail {0} + run: | + if ! openapi-changes summary --no-logo --no-color --markdown $BEFORE_SPEC $AFTER_SPEC >output.md ; then + if ! 
grep -q 'breaking changes discovered' output.md ; then + cat output.md >/dev/stderr + exit 1 + fi + fi + + gawk ' + BEGIN { + RS = "(\r|\n|\r\n)" + WAS_BLANK = 0 + HAD_CHANGES = 0 + } + + /^starting work/ || /^Building original model/ || /^Date:/ || /^SPEC:/ || /^ERROR:/ || /^DONE:/ { + next + } + + /^[[:space:]]*$/ { + WAS_BLANK = 1 + next + } + + WAS_BLANK { + WAS_BLANK = 0 + print "" + } + + { + HAD_CHANGES = 1 + print + } + + END { + if (!HAD_CHANGES) { + print "**NO CHANGES**\n" + } + } + ' output.md | tee changes-summary.md + + - name: Construct Comment Data Payload + shell: bash -eo pipefail {0} + run: | + jq \ + --arg pr_number ${PR_NUMBER} \ + --arg before_sha ${BEFORE_SHA} \ + --arg after_sha ${AFTER_SHA} \ + --arg api_changes_report_url "${API_CHANGES_REPORT_URL}" \ + --rawfile api_changes_summary ./changes-summary.md \ + --slurpfile api_coverage $COVERAGE_DIFF \ + --null-input ' + { + "pr_number": ($pr_number), + "comment_identifier": "# Changes Analysis", + "template_name": "pr-change-analysis", + "template_data": { + "before_sha": ($before_sha), + "after_sha": ($after_sha), + "api_changes_report_url": ($api_changes_report_url), + "api_changes_summary": ($api_changes_summary), + "api_coverage": ($api_coverage[0]) + } + } + ' | tee pr-comment.json + env: + PR_NUMBER: ${{ github.event.pull_request.number }} + API_CHANGES_REPORT_URL: ${{ steps.upload-api-changes-report.outputs.artifact-url }} + + - name: Upload PR Comment Payload + uses: actions/upload-artifact@v4 + with: + name: pr-comment + path: pr-comment.json \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index fa3c882e..0d58084a 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -13,25 +13,23 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: '20' - name: Build - run: |- - mkdir -p ./build - npm install - npm run merge -- --source ./spec --output ./build/opensearch-openapi.yaml + run: npm install && npm run merge - name: Extract Branch Name id: branch shell: bash run: echo "name=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> $GITHUB_OUTPUT - - uses: marvinpinto/action-automatic-releases@v1.2.1 + - name: Release Specification to GitHub + uses: marvinpinto/action-automatic-releases@v1.2.1 with: repo_token: "${{ secrets.GITHUB_TOKEN }}" automatic_release_tag: ${{ steps.branch.outputs.name }} diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml index d4b52195..907d053b 100644 --- a/.github/workflows/check-links.yml +++ b/.github/workflows/check-links.yml @@ -8,7 +8,8 @@ jobs: check: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Checkout Repo + uses: actions/checkout@v4 - name: Link Checker uses: lycheeverse/lychee-action@v1 diff --git a/.github/workflows/coverage-api.yml b/.github/workflows/coverage-api.yml deleted file mode 100644 index 482e242b..00000000 --- a/.github/workflows/coverage-api.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Gather API Coverage - -on: [push, pull_request] - -env: - JAVA_VERSION: 11 - OPENSEARCH_INITIAL_ADMIN_PASSWORD: BobgG7YrtsdKf9M - -jobs: - coverage: - runs-on: ubuntu-latest - permissions: - pull-requests: write - steps: - - name: Checkout Repo - uses: actions/checkout@v2 - - name: Build Spec - run: |- - mkdir -p ./build - npm install - npm run merge -- --source ./spec --output ./build/opensearch-openapi.yaml - - name: 
Build and Run Docker Container - run: | - docker build coverage --tag opensearch-with-api-plugin - docker run -d -p 9200:9200 -p 9600:9600 -e "discovery.type=single-node" -e OPENSEARCH_INITIAL_ADMIN_PASSWORD="$OPENSEARCH_INITIAL_ADMIN_PASSWORD" opensearch-with-api-plugin - sleep 15 - - name: Display OpenSearch Info - run: | - curl -ks -u "admin:$OPENSEARCH_INITIAL_ADMIN_PASSWORD" https://localhost:9200/ | jq - - name: Dump and Compare API - run: | - curl -ks -u "admin:$OPENSEARCH_INITIAL_ADMIN_PASSWORD" https://localhost:9200/_plugins/api | jq > ./build/local-openapi.json - docker run --rm --mount type=bind,source=./build,target=/build openapitools/openapi-diff:latest /build/opensearch-openapi.yaml /build/local-openapi.json --json /build/diff.json - - name: Show Diff - run: | - echo "-------- Missing APIs" - jq -r '.newEndpoints | group_by(.pathUrl)[] | "\(.[0].pathUrl): \([.[].method])"' build/diff.json - echo "-------- Legacy APIs" - jq -r '.missingEndpoints | group_by(.pathUrl)[] | "\(.[0].pathUrl): \([.[].method])"' build/diff.json - - name: Gather Coverage - id: coverage - shell: bash - run: | - current=`docker run --rm -i mikefarah/yq:latest -r '.paths | keys | length' < build/opensearch-openapi.yaml` - total=`jq -r '.paths | keys | length' build/local-openapi.json` - percent=$((current * 100 / total)) - echo "API specs implemented for $current/$total ($percent%) APIs." - cat >>"coverage-api.json" < - github.event.workflow_run.event == 'pull_request' && - github.event.workflow_run.conclusion == 'success' - steps: - - name: Download Coverage Report - uses: actions/download-artifact@v4 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - pattern: coverage-* - merge-multiple: true - run-id: ${{ github.event.workflow_run.id }} - - - name: 'Comment on PR' - uses: actions/github-script@v3 - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const fs = require('fs'); - const coverage_api = JSON.parse(fs.readFileSync('./coverage-api.json')); - console.log(coverage_api); - - body = `API specs implemented for ${coverage_api.current}/${coverage_api.total} (${coverage_api.percent}%) APIs.`; - commit = `Commit ${coverage_api.sha}.` - - const { data: comments } = await github.issues.listComments({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: coverage_api.pull_request - }); - - const existing_comment = comments.find( - comment => comment.body.startsWith("API specs implemented for ") - ); - - if (existing_comment && ! 
existing_comment.body.startsWith(body)) { - // change in coverage, delete existing comment - console.log(`Deleting ${existing_comment.url}.`); - await github.issues.deleteComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: coverage_api.pull_request, - comment_id: existing_comment.id - }); - } - - if (existing_comment && existing_comment.body.startsWith(body)) { - // no change in coverage, update commit id - await github.issues.updateComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: coverage_api.pull_request, - comment_id: existing_comment.id, - body: body + "\n" + commit - }); - } else { - // create a new comment - await github.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: coverage_api.pull_request, - body: body + "\n" + commit - }); - } \ No newline at end of file diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy-gh-pages.yml similarity index 95% rename from .github/workflows/deploy.yml rename to .github/workflows/deploy-gh-pages.yml index c180a53b..8f511261 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy-gh-pages.yml @@ -21,7 +21,7 @@ jobs: uses: actions/checkout@v4 - name: Setup Node.js - uses: actions/setup-node@v2 + uses: actions/setup-node@v3 with: node-version: '20' diff --git a/.github/workflows/pr-comment.yml b/.github/workflows/pr-comment.yml new file mode 100644 index 00000000..ae8109f3 --- /dev/null +++ b/.github/workflows/pr-comment.yml @@ -0,0 +1,74 @@ +name: Comment on PR + +on: + workflow_run: + workflows: + - Analyze PR Changes + types: + - completed + +jobs: + comment: + runs-on: ubuntu-latest + if: > + github.event.workflow_run.event == 'pull_request' && + github.event.workflow_run.conclusion == 'success' + steps: + - name: Checkout Repo + uses: actions/checkout@v4 + with: + ref: refs/heads/main + sparse-checkout: | + .github + + - name: Download PR Comment Payload + uses: actions/download-artifact@v4 + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + name: pr-comment + run-id: ${{ github.event.workflow_run.id }} + + - name: Parse Payload + shell: bash -eo pipefail {0} + run: | + PR_NUMBER=$(jq -r '.pr_number' ./pr-comment.json) + COMMENT_IDENTIFIER=$(jq -r '.comment_identifier' ./pr-comment.json) + TEMPLATE_NAME=$(jq -r '.template_name' ./pr-comment.json) + TEMPLATE_DATA=$(jq -c '.template_data' ./pr-comment.json) + + vars=( + PR_NUMBER + COMMENT_IDENTIFIER + TEMPLATE_NAME + TEMPLATE_DATA + ) + + { + for var in "${vars[@]}" + do + echo "${var}=${!var}" + done + } | tee "$GITHUB_ENV" + + - name: Render Comment Template + uses: chuhlomin/render-template@v1 + id: render + with: + template: .github/pr-comment-templates/${{ env.TEMPLATE_NAME }}.template.md + vars: ${{ env.TEMPLATE_DATA }} + + - name: Find Existing Comment + uses: peter-evans/find-comment@v3 + id: fc + with: + issue-number: ${{ env.PR_NUMBER }} + comment-author: 'github-actions[bot]' + body-includes: ${{ env.COMMENT_IDENTIFIER }} + + - name: Create or Update Comment + uses: peter-evans/create-or-update-comment@v4 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ env.PR_NUMBER }} + body: ${{ steps.render.outputs.result }} + edit-mode: replace \ No newline at end of file diff --git a/.github/workflows/test-tools.yml b/.github/workflows/test-tools.yml index 81121b6b..cd620946 100644 --- a/.github/workflows/test-tools.yml +++ b/.github/workflows/test-tools.yml @@ -4,20 +4,37 @@ on: push: branches: ['**'] paths: + - 'package*.json' 
+ - 'eslint.config.mjs' + - 'jest.config.js' + - 'tsconfig.json' - 'tools/**' pull_request: branches: ['**'] paths: + - 'package*.json' + - 'eslint.config.mjs' + - 'jest.config.js' + - 'tsconfig.json' - 'tools/**' jobs: test: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v3 with: - node-version: 20.10.0 - - run: npm install - - run: npm run test - - run: npm run lint + node-version: '20' + + - name: Install Dependencies + run: npm install + + - name: Run Tests + run: npm run test + + - name: Lint + run: npm run lint diff --git a/.github/workflows/validate-spec.yml b/.github/workflows/validate-spec.yml index 5f26fec4..89213ac6 100644 --- a/.github/workflows/validate-spec.yml +++ b/.github/workflows/validate-spec.yml @@ -16,10 +16,16 @@ jobs: validate: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-node@v3 + - name: Checkout Repo + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v3 with: - node-version: 20.10.0 - - run: npm install - - run: | - npm run lint:spec -- --source ./spec + node-version: '20' + + - name: Install Dependencies + run: npm install + + - name: Lint Spec + run: npm run lint:spec diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index 497cc8c2..f261c5e8 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -1,26 +1,52 @@ -- [Developer Guide](#developer-guide) - - [Getting Started](#getting-started) - - [File Structure](#file-structure) - - [Grouping Operations](#grouping-operations) - - [Grouping Schemas](#grouping-schemas) - - [Superseded Operations](#superseded-operations) - - [Global Parameters](#global-parameters) - - [OpenAPI Extensions](#openapi-extensions) - - [Tools](#tools) - - [Merger](#merger) - - [Linter](#linter) + +* [Developer Guide](#developer-guide) + * [Getting Started](#getting-started) + * [Specification](#specification) + * [File Structure](#file-structure) + * [Grouping Operations](#grouping-operations) + * [Grouping Schemas](#grouping-schemas) + * [Superseded Operations](#superseded-operations) + * [Global Parameters](#global-parameters) + * [OpenAPI Extensions](#openapi-extensions) + * [Tools](#tools) + * [Setup](#setup) + * [Merger](#merger) + * [Arguments](#arguments) + * [Example](#example) + * [Spec Linter](#spec-linter) + * [Arguments](#arguments-1) + * [Example](#example-1) + * [Dump Cluster Spec](#dump-cluster-spec) + * [Arguments](#arguments-2) + * [Example](#example-2) + * [Coverage](#coverage) + * [Arguments](#arguments-3) + * [Example](#example-3) + * [Testing](#testing) + * [Tests](#tests) + * [Lints](#lints) + * [Workflows](#workflows) + * [Analyze PR Changes](#analyze-pr-changes) + * [Build](#build) + * [Deploy GitHub Pages](#deploy-github-pages) + * [Comment on PR](#comment-on-pr) + * [Test Tools](#test-tools) + * [Validate Spec](#validate-spec) + # Developer Guide -Welcome to the ```opensearch-api-specification``` developer guide! Glad you want to contribute. Here are the things you need to know while getting started! +Welcome to the `opensearch-api-specification` developer guide! Glad you want to contribute. Here are the things you need to know while getting started! ## Getting Started Fork the [opensearch-api-specification](https://github.com/opensearch-project/opensearch-api-specification) repository to your GitHub account and clone it to your local machine. 
Whenever you're drafting a change, create a new branch for the change on your fork instead of on the upstream repository.

+## Specification
+
 The Specification is written in OpenAPI 3, so understanding the OpenAPI 3 specification is a must. If you are new to OpenAPI, you can start by reading the [OpenAPI 3 Specification](https://swagger.io/specification/).

-## File Structure
+### File Structure

 To make editing the specification easier, we split the OpenAPI spec into multiple files that can be found in the [spec](spec) directory. The file structure is as follows:
@@ -53,7 +79,7 @@ spec
 Every `.yaml` file in the namespaces and schemas folders is an OpenAPI 3 document. This means that you can use any OpenAPI 3 compatible tool to view and edit the files, and IDEs with OpenAPI support will also offer autocomplete and validation in real time.

-## Grouping Operations
+### Grouping Operations

 Each API action is composed of multiple operations. The `search` action, for example, consists of 4 operations:
@@ -68,7 +94,7 @@ Note that this extension tells the client generators that these operations serve
 For this reason, every operation *must* be accompanied by the `x-operation-group` extension, and operations in the same group MUST have identical descriptions, request and response bodies, and query string parameters.

-## Grouping Schemas
+### Grouping Schemas

 Schemas are grouped by categories to keep their names short, and aid in client generation (where the schemas are translated into data types/classes, and divided into packages/modules). The schema file names can be in one of the following formats:
@@ -77,9 +103,9 @@ Schemas are grouped by categories to keep their names short, and aid in client g
 - `<namespace>._common` category holds the common schemas of a specific namespace. (e.g. `cat._common`, `_core._common`)
 - `<namespace>.<sub_category>` category holds the schemas of a specific sub_category of a namespace. (e.g. `cat.aliases`, `_core.search`)

-## Superseded Operations
+### Superseded Operations

-When an operation is superseded by another operation with **identical functionality**, that is a rename or a change in the URL, it should be listed in [_superseded_operations.yaml](./spec/_superseded_operations.yaml) file. The merger tool will automatically generate the superseded operation in the OpenAPI spec. The superseded operation will have `deprecated: true` and `x-ignorable: true` properties to indicate that it should be ignored by the client generator.
+When an operation is superseded by another operation with **identical functionality**, that is, a rename or a change in the URL, it should be listed in the [_superseded_operations.yaml](spec/_superseded_operations.yaml) file. The merger tool will automatically generate the superseded operation in the OpenAPI spec. The superseded operation will have `deprecated: true` and `x-ignorable: true` properties to indicate that it should be ignored by the client generator.

 For example, if the `_superseded_operations.yaml` file contains the following entry:
 ```yaml
@@ -102,10 +128,11 @@ if and only if the superseding operations exist in the spec. A warning will be p
 Note that the path parameter names do not need to match. So, if the actual superseding operations have a path of `/_plugins/_anomaly_detection/{node_id}/stats/{stat_id}`, the merger tool will recognize that it is the same as `/_plugins/_anomaly_detection/{nodeId}/stats/{stat}` and generate the superseded operations accordingly with the correct path parameter names.
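+
+To illustrate, here is a minimal TypeScript sketch of this kind of name-insensitive path comparison (`same_route` is a hypothetical helper, not part of the tooling, though the normalization mirrors what this PR's `CoverageCalculator` applies to route templates):
+
+```typescript
+// Compare two route templates while ignoring how their path parameters are named.
+function same_route (a: string, b: string): boolean {
+  // '/x/{node_id}/y/{stat_id}' and '/x/{nodeId}/y/{stat}' both normalize to '/x/{}/y/{}'
+  const normalize = (path: string): string => path.replaceAll(/\{[^}]+}/g, '{}')
+  return normalize(a) === normalize(b)
+}
+
+// true: the parameter names differ, but the routes are identical
+console.log(same_route(
+  '/_plugins/_anomaly_detection/{node_id}/stats/{stat_id}',
+  '/_plugins/_anomaly_detection/{nodeId}/stats/{stat}'
+))
+```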
-## Global Parameters
+### Global Parameters
+
 Certain query parameters are global, and they are accepted by every operation. These parameters are listed in the [spec/_global_parameters.yaml](spec/_global_parameters.yaml) file. The merger tool will automatically add these parameters to all operations.

-## OpenAPI Extensions
+### OpenAPI Extensions

 This repository includes several OpenAPI Specification Extensions to fill in any metadata not natively supported by OpenAPI:
@@ -120,12 +147,195 @@ This repository includes several OpenAPI Specification Extensions to fill in any
 ## Tools

-We authored a number of tools to merge and lint specs that live in [tools](./tools/src/). All tools have tests (run with `npm run test`) and a linter (run with `npm run lint`).
+A number of [tools](tools) have been authored using TypeScript to aid in the development of the specification. These largely center around linting and merging the multi-file spec layout.
+
+### Setup
+
+To be able to use or develop the tools, some setup is required:
+1. Install [Node.js](https://nodejs.org/en/learn/getting-started/how-to-install-nodejs).
+2. Run `npm install` from the repository's root.
+
+### [Merger](tools/src/merger)
+
+```bash
+npm run merge -- --help
+```
+
+The merger tool merges the multi-file OpenSearch spec into a single file for programmatic use.
+
+#### Arguments
+
+- `--source <path>`: The path to the root folder of the multi-file spec, defaults to `<repo-root>/spec`.
+- `--output <path>`: The path to write the final merged spec to, defaults to `<repo-root>/build/opensearch-openapi.yaml`.
+
+#### Example
+
+We can take advantage of the default values and simply merge the specification via:
+```bash
+npm run merge
+```
+
+### [Spec Linter](tools/src/linter)
+
+```bash
+npm run lint:spec -- --help
+```
+
+The linter tool validates the OpenSearch multi-file spec, and will print out all the errors and warnings in it.
+
+#### Arguments
+
+- `--source <path>`: The path to the root folder of the multi-file spec, defaults to `<repo-root>/spec`.
+
+#### Example
+
+We can take advantage of the default values and simply lint the specification via:
+```bash
+npm run lint:spec
+```
+
+### [Dump Cluster Spec](tools/src/dump-cluster-spec)
+
+```bash
+npm run dump-cluster-spec -- --help
+```
+
+The dump-cluster-spec tool connects to an OpenSearch cluster which has the [opensearch-api plugin](https://github.com/dblock/opensearch-api) installed and dumps the skeleton OpenAPI specification it provides to a file.
+
+#### Arguments
+
+- `--host <host>`: The host at which the cluster is accessible, defaults to `localhost`.
+- `--port <port>`: The port at which the cluster is accessible, defaults to `9200`.
+- `--no-https`: Disable HTTPS, defaults to using HTTPS.
+- `--insecure`: Disable SSL/TLS certificate verification, defaults to performing verification.
+- `--username <username>`: The username to authenticate with the cluster, defaults to `admin`, only used when `--password` is set.
+- `--password <password>`: The password to authenticate with the cluster, also settable via the `OPENSEARCH_PASSWORD` environment variable.
+- `--output <path>`: The path to write the dumped spec to, defaults to `<repo-root>/build/opensearch-openapi-CLUSTER.yaml`.
+
+#### Example
+
+You can use this repo's [docker image which includes the opensearch-api plugin](coverage/Dockerfile) to spin up a local development cluster with a self-signed certificate (e.g. `https://localhost:9200`) and security enabled, to then dump the skeleton specification:
+```bash
+OPENSEARCH_PASSWORD='My$3cureP@$$w0rd'
+
+docker build ./coverage --tag opensearch-with-api-plugin
+
+docker run \
+  --name opensearch \
+  --rm -d \
+  -p 9200:9200 -p 9600:9600 \
+  -e "discovery.type=single-node" \
+  -e OPENSEARCH_INITIAL_ADMIN_PASSWORD="$OPENSEARCH_PASSWORD" \
+  opensearch-with-api-plugin
+
+OPENSEARCH_PASSWORD="${OPENSEARCH_PASSWORD}" npm run dump-cluster-spec -- --insecure
+
+docker stop opensearch
+```
+
+### [Coverage](tools/src/coverage)
+
+```bash
+npm run coverage:spec -- --help
+```
+
+The coverage tool determines which APIs from the OpenSearch cluster's reference skeleton specification (dumped by the [dump-cluster-spec tool](#dump-cluster-spec)) are covered by this specification (as built by the [merger tool](#merger)).
+
+#### Arguments
+
+- `--cluster <path>`: The path to the cluster's reference skeleton specification, as dumped by [dump-cluster-spec](#dump-cluster-spec), defaults to `<repo-root>/build/opensearch-openapi-CLUSTER.yaml`.
+- `--specification <path>`: The path to the merged specification, as built by [merger](#merger), defaults to `<repo-root>/build/opensearch-openapi.yaml`.
+- `--output <path>`: The path to write the coverage data to, defaults to `<repo-root>/build/coverage.json`.
+
+#### Example
+
+Assuming you've already followed the previous examples to build the merged specification with the [merger](#example) and dump the cluster's specification with [dump-cluster-spec](#example-2), you can then calculate the API coverage:
+```bash
+npm run coverage:spec
+```
+The output file `build/coverage.json` will now contain data like the below:
+```json
+{
+  "$description": {
+    "uncovered": "Endpoints provided by the OpenSearch cluster but DO NOT exist in the specification",
+    "covered": "Endpoints both provided by the OpenSearch cluster and exist in the specification",
+    "specified_but_not_provided": "Endpoints NOT provided by the OpenSearch cluster but exist in the specification"
+  },
+  "counts": {
+    "uncovered": 552,
+    "uncovered_pct": 54.06,
+    "covered": 469,
+    "covered_pct": 45.94,
+    "specified_but_not_provided": 23
+  },
+  "endpoints": {
+    "uncovered": {
+      "/_alias": [
+        "put"
+      ],
+      ...
+    },
+    "covered": {
+      "/_mapping": [
+        "get"
+      ],
+      ...
+    },
+    "specified_but_not_provided": {
+      "/_plugins/_knn/{}/stats": [
+        "get"
+      ],
+      ...
+    }
+  }
+}
+```
+
+### Testing
+
+#### Tests
+
+All tools should have tests added in [tools/tests](tools/tests); tests are implemented using [Jest](https://jestjs.io/). They can be run via:
+```bash
+npm run test
+```
+
+#### Lints
+
+All code is linted using [ESLint](https://eslint.org/) in combination with [typescript-eslint](https://typescript-eslint.io/). Linting can be run via:
+```bash
+npm run lint
+```
+
+If a lint error is unavoidable, it should only be disabled on a case-by-case basis (e.g. `// eslint-disable-next-line @typescript-eslint/dot-notation`) and ideally be justified with an accompanying comment, or at least in PR review.
+
+ESLint's auto-fixes can be applied by running:
+```bash
+npm run lint--fix
+```
+
+## Workflows
+
+### [Analyze PR Changes](.github/workflows/analyze-pr-changes.yml)
+
+This workflow runs on all pull requests to analyze any potential changes to the specification. It uses the [coverage](#coverage) tool and [openapi-changes](https://pb33f.io/openapi-changes/) to calculate coverage metrics and provide a report on the changes compared with the commit at which the PR was branched off.
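+
+For a rough picture of what the coverage comparison boils down to (the workflow itself computes this with jq, as seen in the workflow file above; the TypeScript below is only an illustrative sketch whose field names mirror the coverage tool's `counts` object):
+
+```typescript
+// Sketch: the deltas the PR comment reports, derived from two coverage.json "counts" objects.
+interface Counts {
+  covered: number
+  covered_pct: number
+  uncovered: number
+  uncovered_pct: number
+  specified_but_not_provided: number
+}
+
+// Round to two decimal places, matching the jq expression in the workflow
+const round2 = (n: number): number => Math.round(n * 100) / 100
+
+function coverage_diff (before: Counts, after: Counts) {
+  return {
+    before,
+    after,
+    covered_delta: after.covered - before.covered,
+    covered_pct_delta: round2(after.covered_pct - before.covered_pct),
+    uncovered_delta: after.uncovered - before.uncovered,
+    uncovered_pct_delta: round2(after.uncovered_pct - before.uncovered_pct),
+    specified_but_not_provided_delta: after.specified_but_not_provided - before.specified_but_not_provided
+  }
+}
+```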
+
+### [Build](.github/workflows/build.yml)
+
+This workflow runs on pushes to the `main` branch and will [merge](#merger) the specification and publish it to [GitHub Releases](https://github.com/opensearch-project/opensearch-api-specification/releases).
+
+### [Deploy GitHub Pages](.github/workflows/deploy-gh-pages.yml)
+
+This workflow performs a [Jekyll](https://jekyllrb.com/) build of the `main` branch to generate the [Swagger docs](index.html) and publish them to [GitHub Pages](https://opensearch-project.github.io/opensearch-api-specification/).
+
+### [Comment on PR](.github/workflows/pr-comment.yml)
+
+This workflow is triggered by the completion of workflows such as [Analyze PR Changes](#analyze-pr-changes). It downloads the JSON payload artifact they upload and uses it to render a template from [.github/pr-comment-templates](.github/pr-comment-templates) into a comment that is placed on the original triggering PR.

-### Merger
+### [Test Tools](.github/workflows/test-tools.yml)

-The spec merger "builds", aka combines all `.yaml` files in a spec folder into a complete OpenAPI spec. A [workflow](./.github/workflows/build.yml) performs this task on the [spec folder](spec) of this repo then publishes the output into [releases](https://github.com/opensearch-project/opensearch-api-specification/releases).
+This workflow runs on PRs to invoke the [tools' tests](tools/tests) and [TypeScript linting](#lints) to ensure there are no breakages in behavior or departures from the desired code style and cleanliness.

-### Linter
+### [Validate Spec](.github/workflows/validate-spec.yml)

-The spec linter that validates every `.yaml` file in the `./spec` folder to assure that they follow the guidelines we have set. Check out the [Linter README](tools/README.md#spec-linter) for more information on how to run it locally. Make sure to run the linter before submitting a PR.
+This workflow runs on PRs to invoke the [spec linter](#spec-linter) and ensure the multi-file spec is correct and follows the design guidelines.
\ No newline at end of file
diff --git a/PUBLISHING_SPECS.md b/PUBLISHING_SPECS.md
index 837dac89..3925d962 100644
--- a/PUBLISHING_SPECS.md
+++ b/PUBLISHING_SPECS.md
@@ -3,4 +3,4 @@ ## Publishing OpenSearch API Specs
 * The [build](.github/workflows/build.yml) workflow publishes [a release draft](https://github.com/opensearch-project/opensearch-api-specification/releases) whenever a change is pushed to `main`.
-* The [deploy](.github/workflows/deploy.yml) workflow publishes specs to [GitHub pages](https://opensearch-project.github.io/opensearch-api-specification/).
\ No newline at end of file
+* The [deploy-gh-pages](.github/workflows/deploy-gh-pages.yml) workflow publishes specs to [GitHub pages](https://opensearch-project.github.io/opensearch-api-specification/).
\ No newline at end of file diff --git a/_plugins/openapi.rb b/_plugins/openapi.rb index dd44775f..663ec860 100644 --- a/_plugins/openapi.rb +++ b/_plugins/openapi.rb @@ -3,7 +3,7 @@ def self.generate(_site, _payload) return if @generated system 'npm install' - system 'npm run merge -- --source ./spec --output ./_site/opensearch-openapi.yaml' + system 'npm run merge -- --output ./_site/opensearch-openapi.yaml' @generated = true end diff --git a/coverage/README.md b/coverage/README.md index 3afdb91b..e372aa27 100644 --- a/coverage/README.md +++ b/coverage/README.md @@ -1,5 +1,5 @@ ### API Coverage -Builds the OpenAPI spec, and uses the [opensearch-api plugin](https://github.com/dblock/opensearch-api) and [openapi-diff](https://github.com/OpenAPITools/openapi-diff) to show the differences. +Builds the OpenAPI spec, and uses the [opensearch-api plugin](https://github.com/dblock/opensearch-api) and this repo's [coverage tool](../DEVELOPER_GUIDE.md#coverage) to show the differences. -API coverage is run on all pull requests via the [coverage workflow](../.github/workflows/coverage-api.yml). +API coverage is run on all pull requests via the [Analyze PR Changes workflow](../.github/workflows/analyze-pr-changes.yml). diff --git a/package-lock.json b/package-lock.json index 14f2fc54..ab4e8268 100644 --- a/package-lock.json +++ b/package-lock.json @@ -15,6 +15,7 @@ "@types/node": "^20.10.3", "ajv": "^8.13.0", "ajv-formats": "^3.0.1", + "axios": "^1.7.1", "commander": "^12.0.0", "lodash": "^4.17.21", "ts-node": "^10.9.1", @@ -2046,6 +2047,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" + }, "node_modules/available-typed-arrays": { "version": "1.0.7", "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz", @@ -2061,6 +2067,16 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/axios": { + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.7.1.tgz", + "integrity": "sha512-+LV37nQcd1EpFalkXksWNBiA17NZ5m5/WspmHGmZmdx1qBOg/VNq/c4eRJiA9VQQHBOs+N0ZhhdU10h2TyNK7Q==", + "dependencies": { + "follow-redirects": "^1.15.6", + "form-data": "^4.0.0", + "proxy-from-env": "^1.1.0" + } + }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -2465,6 +2481,17 @@ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", "dev": true }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, "node_modules/commander": { "version": "12.0.0", "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz", @@ -2656,6 +2683,14 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "engines": { + "node": ">=0.4.0" + 
} + }, "node_modules/detect-newline": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/detect-newline/-/detect-newline-3.1.0.tgz", @@ -3712,6 +3747,25 @@ "integrity": "sha512-X8cqMLLie7KsNUDSdzeN8FYK9rEt4Dt67OsG/DNGnYTSDBG4uFAJFBnUeiV+zCVAvwFy56IjM9sH51jVaEhNxw==", "dev": true }, + "node_modules/follow-redirects": { + "version": "1.15.6", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz", + "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, "node_modules/for-each": { "version": "0.3.3", "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz", @@ -3721,6 +3775,19 @@ "is-callable": "^1.1.3" } }, + "node_modules/form-data": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz", + "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", @@ -5396,6 +5463,25 @@ "node": ">=8.6" } }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, "node_modules/mimic-fn": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", @@ -5822,6 +5908,11 @@ "node": ">= 6" } }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==" + }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", diff --git a/package.json b/package.json index 0aab8833..72263f34 100644 --- a/package.json +++ b/package.json @@ -5,10 +5,12 @@ "author": "opensearch-project", "license": "Apache-2.0", "scripts": { - "merge": "ts-node tools/src/merger/merge.ts", + "coverage:spec": "ts-node tools/src/coverage/coverage.ts", + "dump-cluster-spec": "ts-node tools/src/dump-cluster-spec/dump-cluster-spec.ts", "lint:spec": "ts-node tools/src/linter/lint.ts", "lint": "eslint .", "lint--fix": "eslint . 
--fix", + "merge": "ts-node tools/src/merger/merge.ts", "test": "jest" }, "dependencies": { @@ -18,6 +20,7 @@ "@types/node": "^20.10.3", "ajv": "^8.13.0", "ajv-formats": "^3.0.1", + "axios": "^1.7.1", "commander": "^12.0.0", "lodash": "^4.17.21", "ts-node": "^10.9.1", diff --git a/tools/README.md b/tools/README.md deleted file mode 100644 index ec95eab7..00000000 --- a/tools/README.md +++ /dev/null @@ -1,47 +0,0 @@ -# OpenSearch OpenAPI Tools - -This folder contains tools for the repo: - -- [Merger](./src/merger): merges multiple OpenAPI files into one -- [Linter](./src/linter): validates files in the spec folder - -## Setup - -1. Install [Node.js](https://nodejs.org/en/learn/getting-started/how-to-install-nodejs) -2. Run `npm install`. - -## Merger - -The merger tool merges the multi-file OpenSearch spec into a single file for programmatic use. - -It requires a path to the root folder of the multi-file spec (`--source`) and a path to the output file (`--output`). - -Example: - -```bash -mkdir -p ./build -npm run merge -- --source ./spec --output ./build/opensearch-openapi.yaml -``` - -As a shortcut, if those parameters are not provided, the tool will use the default values: - -- `../spec` as the root path (i.e. the repo's [spec folder](../spec)) -- `./build/opensearch-openapi.yaml` as the output path - -```bash -npm run merge -``` - -Run `npm run merge -- --help` for all options. - -## Spec Linter - -The linter tool validates the OpenSearch multi-file spec, and will print out all the errors and warnings in it. - -It requires a path to the root folder of the multi-file spec (`--source`). - -```bash -npm run lint:spec -- --source ./spec -``` - -Run `npm run lint:spec -- --help` for all options. diff --git a/tools/helpers.ts b/tools/helpers.ts index cdbb5af5..276d7f9f 100644 --- a/tools/helpers.ts +++ b/tools/helpers.ts @@ -1,6 +1,10 @@ import fs from 'fs' +import path from 'path' import YAML from 'yaml' import _ from 'lodash' +import { OpenAPIV3 } from 'openapi-types' + +export const HTTP_METHODS: OpenAPIV3.HttpMethods[] = Object.values(OpenAPIV3.HttpMethods) export function resolve_ref (ref: string, root: Record): Record | undefined { const paths = ref.replace('#/', '').split('/') @@ -43,27 +47,35 @@ export function sort_by_keys (obj: Record, priorities: string[] = [ }) } -export function read_yaml (file_path: string, exclude_schema: boolean = false): Record { +export function ensure_parent_dir (file_path: string): void { + fs.mkdirSync(path.dirname(file_path), { recursive: true }) +} + +export function write_text (file_path: string, text: string): void { + ensure_parent_dir(file_path) + fs.writeFileSync(file_path, text) +} + +export function read_yaml> (file_path: string, exclude_schema: boolean = false): T { const doc = YAML.parse(fs.readFileSync(file_path, 'utf8')) - if (exclude_schema) delete doc.$schema + if (typeof doc === 'object' && exclude_schema) delete doc.$schema return doc } -export function write_yaml (file_path: string, content: Record): void { - fs.writeFileSync(file_path, quote_refs(YAML.stringify(remove_anchors(content), { lineWidth: 0, singleQuote: true }))) +export function write_yaml (file_path: string, content: any): void { + write_text(file_path, YAML.stringify( + content, + { + lineWidth: 0, + singleQuote: true, + aliasDuplicateObjects: false + })) } -function quote_refs (str: string): string { - return str.split('\n').map((line) => { - if (line.includes('$ref')) { - const [key, value] = line.split(': ') - if (!value.startsWith("'")) line = `${key}: '${value}'` - } 
-    return line
-  }).join('\n')
+export function write_json (file_path: string, content: any, replacer?: (this: any, key: string, value: any) => any): void {
+  write_text(file_path, JSON.stringify(content, replacer, 2))
 }

-function remove_anchors (content: Record<string, any>): Record<string, any> {
-  const replacer = (key: string, value: any): any => key === '$anchor' ? undefined : value
-  return JSON.parse(JSON.stringify(content, replacer))
+export async function sleep (ms: number): Promise<void> {
+  await new Promise((resolve) => setTimeout(resolve, ms))
 }
diff --git a/tools/src/coverage/CoverageCalculator.ts b/tools/src/coverage/CoverageCalculator.ts
new file mode 100644
index 00000000..9662e35e
--- /dev/null
+++ b/tools/src/coverage/CoverageCalculator.ts
@@ -0,0 +1,78 @@
+import { type OpenAPIV3 } from 'openapi-types'
+import { HTTP_METHODS, read_yaml, write_json } from '../../helpers'
+
+export default class CoverageCalculator {
+  private readonly _cluster_spec: OpenAPIV3.Document
+  private readonly _input_spec: OpenAPIV3.Document
+  private readonly _output_path: string
+
+  constructor (cluster_spec_path: string, input_spec_path: string, output_path: string) {
+    this._cluster_spec = read_yaml(cluster_spec_path)
+    this._input_spec = read_yaml(input_spec_path)
+    this._output_path = output_path
+  }
+
+  calculate (): void {
+    type Endpoints = Record<string, Set<OpenAPIV3.HttpMethods>>
+    const collect = (document: OpenAPIV3.Document): Endpoints =>
+      Object.fromEntries(
+        Object.entries(document.paths)
+          .map(([path, path_item]): [string, Set<OpenAPIV3.HttpMethods>] => {
+            // Sanitize path params to ignore naming of params in route templates
+            path = path.replaceAll(/\{[^}]+}/g, '{}')
+            if (path_item == null) return [path, new Set()]
+            return [path, new Set(HTTP_METHODS.filter(method => path_item[method] != null))]
+          })
+      )
+    const count = (endpoints: Endpoints): number =>
+      Object.values(endpoints).map(methods => methods.size).reduce((acc, v) => acc + v, 0)
+    const prune = (endpoints: Endpoints): Endpoints =>
+      Object.fromEntries(Object.entries(endpoints).filter(([_, methods]) => methods.size > 0))
+
+    const uncovered = collect(this._cluster_spec)
+    const specified_but_not_provided = collect(this._input_spec)
+    const covered: Endpoints = {}
+
+    for (const [path, methods] of Object.entries(uncovered)) {
+      if (specified_but_not_provided[path] === undefined) continue
+
+      for (const method of [...methods]) {
+        if (!specified_but_not_provided[path].delete(method)) continue
+
+        if (covered[path] === undefined) covered[path] = new Set()
+        covered[path].add(method)
+        uncovered[path].delete(method)
+      }
+    }
+
+    const uncovered_count = count(uncovered)
+    const covered_count = count(covered)
+    const total_count = uncovered_count + covered_count
+
+    write_json(
+      this._output_path,
+      {
+        $description: {
+          uncovered: 'Endpoints provided by the OpenSearch cluster but DO NOT exist in the specification',
+          covered: 'Endpoints both provided by the OpenSearch cluster and exist in the specification',
+          specified_but_not_provided: 'Endpoints NOT provided by the OpenSearch cluster but exist in the specification'
+        },
+        counts: {
+          uncovered: uncovered_count,
+          uncovered_pct: Math.round(uncovered_count / total_count * 100 * 100) / 100,
+          covered: covered_count,
+          covered_pct: Math.round(covered_count / total_count * 100 * 100) / 100,
+          specified_but_not_provided: count(specified_but_not_provided)
+        },
+        endpoints: {
+          uncovered: prune(uncovered),
+          covered: prune(covered),
+          specified_but_not_provided: prune(specified_but_not_provided)
+        }
+      },
+      (_, value) => {
+        if (value instanceof Set) return [...value]
+        return value
+      })
+  }
+}
diff --git a/tools/src/coverage/coverage.ts b/tools/src/coverage/coverage.ts
new file mode 100644
index 00000000..8acd80d6
--- /dev/null
+++ b/tools/src/coverage/coverage.ts
@@ -0,0 +1,15 @@
+import { Command, Option } from '@commander-js/extra-typings'
+import CoverageCalculator from './CoverageCalculator'
+import { resolve } from 'path'
+
+const command = new Command()
+  .description('Calculates the coverage of a specification against an OpenSearch cluster\'s generated specification.')
+  .addOption(new Option('--cluster <path>', 'path to the cluster\'s generated specification.').default(resolve(__dirname, '../../../build/opensearch-openapi-CLUSTER.yaml')))
+  .addOption(new Option('--specification <path>', 'path to the specification to calculate coverage of.').default(resolve(__dirname, '../../../build/opensearch-openapi.yaml')))
+  .addOption(new Option('-o, --output <path>', 'path to the output file.').default(resolve(__dirname, '../../../build/coverage.json')))
+  .allowExcessArguments(false)
+  .parse()
+
+const opts = command.opts()
+const calculator = new CoverageCalculator(opts.cluster, opts.specification, opts.output)
+calculator.calculate()
diff --git a/tools/src/dump-cluster-spec/dump-cluster-spec.ts b/tools/src/dump-cluster-spec/dump-cluster-spec.ts
new file mode 100644
index 00000000..1d3de6ba
--- /dev/null
+++ b/tools/src/dump-cluster-spec/dump-cluster-spec.ts
@@ -0,0 +1,76 @@
+import { Command, Option } from '@commander-js/extra-typings'
+import { resolve } from 'path'
+import axios from 'axios'
+import * as https from 'node:https'
+import * as process from 'node:process'
+import { sleep, write_yaml } from '../../helpers'
+
+interface CommandOpts {
+  host: string
+  https: boolean
+  insecure: boolean
+  port: string | number
+  username: string
+  password?: string
+  output: string
+}
+
+async function main (opts: CommandOpts): Promise<void> {
+  const url = `http${opts.https ? 's' : ''}://${opts.host}:${opts.port}`
+  const client = axios.create({
+    httpsAgent: new https.Agent({
+      rejectUnauthorized: !opts.insecure
+    }),
+    auth: opts.password !== undefined
+      ? {
+          username: opts.username,
+          password: opts.password
+        }
+      : undefined
+  })
+
+  let attempt = 0
+  while (true) {
+    attempt += 1
+    try {
+      const info = await client.get(url)
+      console.log(info.data)
+      break
+    } catch (e) {
+      if (attempt >= 20) {
+        throw e
+      }
+      await sleep(5000)
+    }
+  }
+
+  const cluster_spec = await client.get(`${url}/_plugins/api`)
+
+  write_yaml(opts.output, cluster_spec.data)
+}
+
+const command = new Command()
+  .description('Dumps an OpenSearch cluster\'s generated specification.')
+  .addOption(new Option('--host <host>', 'cluster\'s host').default('localhost'))
+  .addOption(new Option('--no-https', 'disable HTTPS'))
+  .addOption(new Option('--insecure', 'disable SSL/TLS certificate verification').default(false))
+  .addOption(new Option('--port <port>', 'cluster\'s port to connect to').default(9200))
+  .addOption(new Option('--username <username>', 'username to authenticate with the cluster').default('admin'))
+  .addOption(new Option('--password <password>', 'password to authenticate with the cluster').env('OPENSEARCH_PASSWORD'))
+  .addOption(new Option('--output <path>', 'path to the output file').default(resolve(__dirname, '../../../build/opensearch-openapi-CLUSTER.yaml')))
+  .allowExcessArguments(false)
+  .parse()
+
+main(command.opts())
+  .catch(e => {
+    if (e instanceof Error) {
+      console.error(`ERROR: ${e.stack}`)
+      while (e.cause !== undefined) {
+        // Walk the cause chain so each underlying error is reported, rather than the same stack repeatedly
+        e = e.cause
+        console.error('Caused by:', e.stack ?? e)
+      }
+    } else {
+      console.error('ERROR:', e)
+    }
+    process.exit(1)
+  })
diff --git a/tools/src/linter/components/base/FileValidator.ts b/tools/src/linter/components/base/FileValidator.ts
index 3d3a5d29..6941458b 100644
--- a/tools/src/linter/components/base/FileValidator.ts
+++ b/tools/src/linter/components/base/FileValidator.ts
@@ -17,7 +17,7 @@ export default class FileValidator extends ValidatorBase {
   spec (): OpenAPIV3.Document {
     if (this._spec) return this._spec
-    this._spec = read_yaml(this.file_path) as OpenAPIV3.Document
+    this._spec = read_yaml(this.file_path)
     return this._spec
   }
diff --git a/tools/src/merger/GlobalParamsGenerator.ts b/tools/src/merger/GlobalParamsGenerator.ts
index e8da16ea..d883e081 100644
--- a/tools/src/merger/GlobalParamsGenerator.ts
+++ b/tools/src/merger/GlobalParamsGenerator.ts
@@ -7,7 +7,7 @@ export default class GlobalParamsGenerator {
   constructor (root_path: string) {
     const file_path = root_path + '/_global_parameters.yaml'
-    const spec = read_yaml(file_path) as OpenAPIV3.Document
+    const spec: OpenAPIV3.Document = read_yaml(file_path)
     this.global_params = this.create_global_params(spec)
   }
diff --git a/tools/src/merger/SupersededOpsGenerator.ts b/tools/src/merger/SupersededOpsGenerator.ts
index c8bffd3c..39570aaf 100644
--- a/tools/src/merger/SupersededOpsGenerator.ts
+++ b/tools/src/merger/SupersededOpsGenerator.ts
@@ -10,7 +10,7 @@ export default class SupersededOpsGenerator {
   constructor (root_path: string, logger: Logger | undefined) {
     this.logger = logger ?? new Logger(LogLevel.warn)
     const file_path = root_path + '/_superseded_operations.yaml'
-    this.superseded_ops = read_yaml(file_path) as SupersededOperationMap
+    this.superseded_ops = read_yaml(file_path)
     delete this.superseded_ops.$schema
   }
diff --git a/tools/src/merger/merge.ts b/tools/src/merger/merge.ts
index a8b33526..c38f29c9 100644
--- a/tools/src/merger/merge.ts
+++ b/tools/src/merger/merge.ts
@@ -5,7 +5,7 @@ const command = new Command()
   .description('Merges the multi-file OpenSearch spec into a single file for programmatic use.')
   .addOption(new Option('-s, --source <path>', 'path to the root folder of the multi-file spec').default(resolve(__dirname, '../../../spec')))
-  .addOption(new Option('-o, --output <path>', 'output file name').default(resolve(__dirname, '../../opensearch-openapi.yaml')))
+  .addOption(new Option('-o, --output <path>', 'output file name').default(resolve(__dirname, '../../../build/opensearch-openapi.yaml')))
   .allowExcessArguments(false)
   .parse()