diff --git a/.github/workflows/release-fixtures.yml b/.github/workflows/release-fixtures.yml new file mode 100644 index 000000000..87e1ce904 --- /dev/null +++ b/.github/workflows/release-fixtures.yml @@ -0,0 +1,23 @@ +name: Release Fixtures +on: + workflow_dispatch: + workflow_run: + workflows: [Release] + types: + - completed + + +jobs: + release: + runs-on: ubuntu-latest + steps: + - name: Generate Fixtures + env: + W3STORAGE_TOKEN: ${{ secrets.W3STORAGE_TOKEN }} + run: | + make fixtures.car + - name: Upload fixtures + uses: web3-storage/add-to-web3@v2 + with: + web3_token: ${{ secrets.W3STORAGE_TOKEN }} + path_to_add: 'fixtures.car' diff --git a/.github/workflows/test-prod-e2e.yml b/.github/workflows/test-prod-e2e.yml new file mode 100644 index 000000000..8819deec2 --- /dev/null +++ b/.github/workflows/test-prod-e2e.yml @@ -0,0 +1,115 @@ +name: Test Production (e2e) + +on: + workflow_dispatch: + push: + branches: + - main + pull_request: + +concurrency: + group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event_name == 'push' && github.sha || github.ref }} + cancel-in-progress: true + +jobs: + upload-fixtures: + runs-on: "ubuntu-latest" + defaults: + run: + shell: bash + steps: + - name: Setup Go + uses: actions/setup-go@v3 + with: + go-version: 1.20.4 + - uses: actions/checkout@v3 + with: + path: "gateway-conformance" + - name: Extract fixtures + uses: ./gateway-conformance/.github/actions/extract-fixtures + with: + output: ./ + merged: true + # https://web3.storage/docs/how-tos/store/#storing-ipfs-content-archives + - name: Upload fixtures + id: upload + uses: web3-storage/add-to-web3@v2 + with: + web3_token: ${{ secrets.W3STORAGE_TOKEN }} + path_to_add: 'fixtures.car' + - name: Wait for pinning + run: | + sleep 180 # 3 minutes + # see rationale in https://github.com/ipfs/gateway-conformance/pull/108#discussion_r1274628865 + test: + runs-on: "ubuntu-latest" + strategy: + matrix: + target: ["ipfs.runfission.com", "w3s.link"] + fail-fast: false + 
defaults: + run: + shell: bash + needs: upload-fixtures + steps: + - name: Setup Go + uses: actions/setup-go@v3 + with: + go-version: 1.20.4 + - uses: actions/checkout@v3 + with: + path: "gateway-conformance" + - name: Run the tests + uses: ./gateway-conformance/.github/actions/test + with: + gateway-url: https://${{ matrix.target }} + subdomain-url: https://${{ matrix.target }} + json: output.json + xml: output.xml + html: output.html + markdown: output.md + - name: Upload one-page HTML report + if: (failure() || success()) + uses: actions/upload-artifact@v3 + with: + name: conformance-${{ matrix.target }}.html + path: ./output.html + - name: Upload JSON output + if: (failure() || success()) + uses: actions/upload-artifact@v3 + with: + name: conformance-${{ matrix.target }}.json + path: ./output.json + aggregate: + runs-on: "ubuntu-latest" + needs: [test] + # the tests might have failed + if: always() + defaults: + run: + shell: bash + steps: + - uses: actions/checkout@v3 + with: + path: "gateway-conformance" + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + - name: Aggregate results + working-directory: ./artifacts + run: | + mkdir ./aggregates + + # download-artifact downloads artifacts in a directory named after the artifact + # details: https://github.com/actions/download-artifact#download-all-artifacts + for folder in ./conformance-*.json; do + file="${folder}/output.json" + new_file="aggregates/${folder#conformance-}" + jq -ns 'inputs' "$file" | node ../gateway-conformance/aggregate.js 1 > "${new_file}" + done + + node ../gateway-conformance/aggregate-into-table.js ./aggregates/*.json > ./table.md + - name: Set summary + if: (failure() || success()) + run: cat ./artifacts/table.md >> $GITHUB_STEP_SUMMARY \ No newline at end of file diff --git a/.github/workflows/update-badge.yml b/.github/workflows/update-badge.yml new file mode 100644 index 000000000..4c647c439 --- /dev/null +++ 
b/.github/workflows/update-badge.yml @@ -0,0 +1,63 @@ +# Note: this workflow requires the repository to give Write access to Github Workflows. +# in Settings > Actions > General > Workflow permissions + +permissions: + contents: write +name: Update Badge + +on: + workflow_run: + workflows: + - Test Production (e2e) + types: + - completed + branches: + - main + +defaults: + run: + shell: bash + +concurrency: + group: ${{ github.workflow }} + cancel-in-progress: true + +jobs: + update-badge: + runs-on: ubuntu-latest + steps: + - uses: pl-strflt/job-summary-url-action@v1 + id: metadata + with: + workflow: test-prod-e2e.yml # ${{ github.event.workflow.path }} + run_id: ${{ github.event.workflow_run.id }} + run_attempt: ${{ github.event.workflow_run.run_attempt }} + job: aggregate + - uses: actions/checkout@v3 + # https://github.com/orgs/community/discussions/26560 + - run: | + git config user.email "41898282+github-actions[bot]@users.noreply.github.com" + git config user.name "github-actions[bot]" + - run: | + echo GITHUB_JOB_SUMMARY_URL=${GITHUB_JOB_SUMMARY_URL} + IN='[![Conformance Production Dashboard](https://github.com/ipfs/gateway-conformance/actions/workflows/test-prod-e2e.yml/badge.svg?branch=master)](.*)' + ESCAPED_IN=$(printf '%s\n' "$IN" | sed -e 's/[][\/!&]/\\&/g') + + OUT="[![Conformance Production Dashboard](https://github.com/ipfs/gateway-conformance/actions/workflows/test-prod-e2e.yml/badge.svg?branch=master)](${GITHUB_JOB_SUMMARY_URL})" + + sed -i "s;${ESCAPED_IN};${OUT};" README.md + env: + GITHUB_JOB_SUMMARY_URL: ${{ steps.metadata.outputs.job_summary_url }} + REPOSITORY: ${{ github.repository }} + - id: git + run: | + if [[ -n $(git diff --shortstat 2> /dev/null | tail -n1) ]]; then + echo "dirty=1" >> $GITHUB_OUTPUT + else + echo "dirty=0" >> $GITHUB_OUTPUT + fi + - if: steps.git.outputs.dirty == '1' + run: | + git add README.md + git commit -m 'chore: update the link to the dashboard [skip ci]' + git push diff --git a/CHANGELOG.md b/CHANGELOG.md 
index 18df60519..e728d1094 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Changed - finalized port of Kubo's sharness tests. [PR](https://github.com/ipfs/gateway-conformance/pull/92) +- `extract-fixtures --merged` generates a car version 1 with a single root now ## [0.2.0] - 2023-06-26 ### Added diff --git a/README.md b/README.md index 6bbb7e873..c0db0ba61 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ `gateway-conformance` is a tool designed to test if an IPFS Gateway implementation complies with the IPFS Gateway Specification correctly. The tool is distributed as a Docker image, as well as a GitHub Action(s). +[![Conformance Production Dashboard](https://github.com/ipfs/gateway-conformance/actions/workflows/test-prod-e2e.yml/badge.svg?branch=master)]() + ## Table of Contents - [Commands](#commands) diff --git a/aggregate-into-table.js b/aggregate-into-table.js new file mode 100644 index 000000000..67ea9012a --- /dev/null +++ b/aggregate-into-table.js @@ -0,0 +1,86 @@ +const fs = require("fs"); + +// retrieve the list of input files from the command line +const files = process.argv.slice(2); + +// read all input files (json) +const inputs = files.map((file) => { + return JSON.parse(fs.readFileSync(file, 'utf8')); +} +); + +// merge all the unique keys from all the inputs +let keys = new Set(); +inputs.forEach((input) => { + Object.keys(input).forEach((key) => { + keys.add(key); + }); +}); +keys = Array.from(keys).sort(); + +// generate a table +const columns = []; + +// add the leading column ("gateway", "key1", "key2", ... 
"keyN") +const leading = ["gateway"]; +keys.forEach((key) => { + // Skip the "Test" prefix + const niceKey = key.replace(/^Test/, ''); + leading.push(niceKey); +}); +columns.push(leading); + +// add the data for every input +const cellRender = (cell) => { + if (cell === null) { + return ''; + } + + if (cell['fail'] > 0) { + return `:red_circle: (${cell['pass']} / ${cell['total']})`; + } + if (cell['skip'] > 0) { + return `:yellow_circle: (skipped)`; + } + if (cell['pass'] > 0) { + return `:green_circle: (${cell['pass']} / ${cell['total']})`; + } + + throw new Error(`Unhandled cell value: ${JSON.stringify(cell)}`); +} + +inputs.forEach((input, index) => { + // clean name (remove path and extension) + let name = files[index].replace(/\.json$/, '').replace(/^.*\//, ''); + + const col = [name]; + keys.forEach((key) => { + col.push(cellRender(input[key] || null)); + }); + columns.push(col); +}); + +// # Rotate the table +// it's easier to create the table by column, but we want to render it by row +let rows = columns[0].map((_, i) => columns.map(col => col[i])); + +// # Render the table into a markdown table + +// add the hyphen header row after the first row +const hyphenated = rows[0].map((x, i) => { + if (i === 0) { + return '-'.repeat(Math.max(0, x.length - 2)) + '-:' + } + return ':-' + '-'.repeat(Math.max(0, x.length - 2)); +}) + +rows = [ + rows[0], + hyphenated, + ...rows.slice(1), +] + +let markdown = rows.map(row => '| ' + row.join(' | ') + ' |').join('\n'); + +// output the table to stdout +fs.writeFileSync(1, markdown); diff --git a/aggregate.js b/aggregate.js new file mode 100644 index 000000000..347096409 --- /dev/null +++ b/aggregate.js @@ -0,0 +1,94 @@ +const fs = require("fs"); + +// # read json from stdin: +let lines = fs.readFileSync(0, "utf-8"); +lines = JSON.parse(lines); + +// # clean input +lines = lines.filter((line) => { + const { Test } = line; + return Test !== undefined; +}); + +lines = lines.filter((line) => { + const { Action } = line; + 
return ["pass", "fail", "skip"].includes(Action); +}); + +// # add "Path" field by parsing "Name" and split by "/" +// also update the name to make it readable +// also remove "Time" field while we're at it +lines = lines.map((line) => { + const { Test, Time, ...rest } = line; + const path = Test.split("/").map((name) => { + return name.replace(/_/g, " "); + }); + + return { ...rest, Path: path }; +}); + +// # Aggregate all known "Path" values, use a tree structure to represent it +// { +// child1: { +// child2: { +// ..., +// } +// } +// } +const testTree = {}; + +lines.forEach((line) => { + const { Path } = line; + let current = testTree; + + Path.forEach((path) => { + if (!current[path]) { + current[path] = {}; + } + current = current[path]; + }); +}) + +// # Drop all lines where the Test "Path" does not point to a leaf +// if the test has children then we don't really care about it's pass / fail / skip status, +// we'll aggregate its children results' +lines = lines.filter((line) => { + const { Path } = line; + let current = testTree; + + Path.forEach((path) => { + if (!current[path]) { + return false; + } + current = current[path]; + }); + + // if current has children, it is not a leaf + return Object.keys(current).length === 0; +}); + +// # Aggregate by Path and count actions + +const depth = process.argv[2] && parseInt(process.argv[2], 10) || 1; + +// test result is a map { [path_str]: { [path], [action]: count } } +const testResults = {}; + +lines.forEach((line) => { + const { Path, Action } = line; + let current = testResults; + + const path = Path.slice(0, depth) + const key = path.join(" > "); + + if (!current[key]) { + current[key] = {Path: path, "pass": 0, "fail": 0, "skip": 0, "total": 0}; + } + current = current[key]; + + current[Action] += 1; + current["total"] += 1; +}); + +// output result to stdout +fs.writeFileSync(1, JSON.stringify(testResults, null, 2)); diff --git a/go.mod b/go.mod index e4a132b7b..2aae65d3a 100644 --- a/go.mod +++ b/go.mod 
@@ -47,7 +47,7 @@ require ( github.com/hashicorp/golang-lru v0.5.4 // indirect github.com/ipfs/bbloom v0.0.4 // indirect github.com/ipfs/go-bitfield v1.1.0 // indirect - github.com/ipfs/go-block-format v0.1.2 // indirect + github.com/ipfs/go-block-format v0.1.2 github.com/ipfs/go-datastore v0.6.0 // indirect github.com/ipfs/go-ipfs-util v0.0.2 // indirect github.com/ipfs/go-ipld-cbor v0.0.6 // indirect diff --git a/tooling/car/merge.go b/tooling/car/merge.go index 6c00f4595..ab0ada373 100644 --- a/tooling/car/merge.go +++ b/tooling/car/merge.go @@ -4,14 +4,32 @@ import ( "context" "fmt" + blocks "github.com/ipfs/go-block-format" "github.com/ipfs/go-cid" + carv2 "github.com/ipld/go-car/v2" "github.com/ipld/go-car/v2/blockstore" + "github.com/ipld/go-ipld-prime/fluent" + "github.com/ipld/go-ipld-prime/linking" + cidlink "github.com/ipld/go-ipld-prime/linking/cid" + basicnode "github.com/ipld/go-ipld-prime/node/basic" + "github.com/ipld/go-ipld-prime/storage/memstore" ) -func Merge(inputPaths []string, outputPath string) error { - // First list all the roots in our fixtures - roots := make([]cid.Cid, 0) +// https://github.com/ipld/go-ipld-prime/blob/65bfa53512f2328d19273e471ce4fd6d964055a2/storage/bsadapter/bsadapter.go#L111C1-L120C2 +func cidFromBinString(key string) (cid.Cid, error) { + l, k, err := cid.CidFromBytes([]byte(key)) + if err != nil { + return cid.Undef, fmt.Errorf("bsrvadapter: key was not a cid: %w", err) + } + if l != len(key) { + return cid.Undef, fmt.Errorf("bsrvadapter: key was not a cid: had %d bytes leftover", len(key)-l) + } + return k, nil +} +func Merge(inputPaths []string, outputPath string) error { + // First list all the unique roots in our fixtures + uniqRoots := make(map[string]cid.Cid) for _, path := range inputPaths { fmt.Printf("processing %s\n", path) robs, err := blockstore.OpenReadOnly(path, @@ -26,20 +44,83 @@ func Merge(inputPaths []string, outputPath string) error { return err } - roots = append(roots, r...) 
+ for _, root := range r { + uniqRoots[root.String()] = root + } + } + + roots := make([]cid.Cid, 0) + for _, root := range uniqRoots { + roots = append(roots, root) } + // Then aggregate all roots under a single one + lsys := cidlink.DefaultLinkSystem() + store := memstore.Store{Bag: make(map[string][]byte)} + lsys.SetWriteStorage(&store) + lsys.SetReadStorage(&store) + + // Adding to a map, they won't accept duplicate, hence the need for the uniqRoots + node := fluent.MustBuildMap(basicnode.Prototype.Map, int64(len(roots)), func(ma fluent.MapAssembler) { + ma.AssembleEntry("Links").CreateList(int64(len(roots)), func(na fluent.ListAssembler) { + for _, root := range roots { + na.AssembleValue().CreateMap(3, func(fma fluent.MapAssembler) { + fma.AssembleEntry("Hash").AssignLink(cidlink.Link{Cid: root}) + }) + } + }) + }) + + lp := cidlink.LinkPrototype{Prefix: cid.Prefix{ + Version: 1, + Codec: 0x70, // dag-pb + MhType: 0x12, + MhLength: 32, // sha2-256 + }} + + lnk, err := lsys.Store( + linking.LinkContext{}, + lp, + node) + if err != nil { + return err + } + + rootCid := lnk.(cidlink.Link).Cid + // Now prepare our new CAR file - fmt.Printf("Opening the %s file, with roots: %v\n", outputPath, roots) - rout, err := blockstore.OpenReadWrite(outputPath, roots) + fmt.Printf("Opening the %s file, with root: %v\n", outputPath, rootCid) + options := []carv2.Option{blockstore.WriteAsCarV1(true)} + rout, err := blockstore.OpenReadWrite(outputPath, []cid.Cid{rootCid}, options...) 
if err != nil { return err } + // Add blocks from our store (root block) + for k, v := range store.Bag { + // cid.Parse and cid.Decode does not work here, using: + // https://github.com/ipld/go-ipld-prime/blob/65bfa53512f2328d19273e471ce4fd6d964055a2/storage/bsadapter/bsadapter.go#L87-L89 + c, err := cidFromBinString(k) + if err != nil { + return err + } + + blk, err := blocks.NewBlockWithCid(v, c) + if err != nil { + return err + } + + err = rout.Put(context.Background(), blk) + if err != nil { + return err + } + } + // Then aggregate all our blocks. for _, path := range inputPaths { fmt.Printf("processing %s\n", path) - robs, err := blockstore.OpenReadOnly(path, + robs, err := blockstore.OpenReadOnly( + path, blockstore.UseWholeCIDs(true), ) if err != nil { @@ -52,7 +133,6 @@ func Merge(inputPaths []string, outputPath string) error { } for c := range cids { - fmt.Printf("Adding %s\n", c.String()) block, err := robs.Get(context.Background(), c) if err != nil { return err @@ -64,5 +144,6 @@ func Merge(inputPaths []string, outputPath string) error { fmt.Printf("Finalizing...\n") err = rout.Finalize() + return err }