From e509958ad952076caef209c985beef3dbd717ea1 Mon Sep 17 00:00:00 2001
From: Prithvi Shahi
Date: Thu, 5 Sep 2024 10:48:52 -0700
Subject: [PATCH 1/3] chore: Create funding.json

Add funding.json for application to OP rPGF round 5
---
 funding.json | 5 +++++
 1 file changed, 5 insertions(+)
 create mode 100644 funding.json

diff --git a/funding.json b/funding.json
new file mode 100644
index 000000000..bcf7fc278
--- /dev/null
+++ b/funding.json
@@ -0,0 +1,5 @@
+{
+  "opRetro": {
+    "projectId": "0x966804cb492e1a4bde5d781a676a44a23d69aa5dd2562fa7a4f95bb606021c8b"
+  }
+}

From e5e32c7c0a513e5d22a0f9b626103f37b8a311ac Mon Sep 17 00:00:00 2001
From: Piotr Galar
Date: Wed, 11 Sep 2024 10:01:42 +0200
Subject: [PATCH 2/3] chore: parameterise s3 build cache setup (#465)

* chore: parameterise s3 build cache setup

* Apply suggestions from code review

Co-authored-by: Alex Potsides

* fix: cache load/push

* chore: pass creds when reading cache

* fix: interact with aws only using the CLI

* Update transport-interop.yml

---------

Co-authored-by: Alex Potsides
---
 .../run-interop-hole-punch-test/action.yml   |  5 +++
 .../actions/run-interop-ping-test/action.yml |  5 +++
 .../run-transport-interop-test/action.yml    |  5 +++
 .github/workflows/hole-punch-interop.yml     |  6 ++--
 .github/workflows/transport-interop.yml      |  8 ++---
 hole-punch-interop/helpers/cache.ts          | 36 ++++++++++---------
 transport-interop/helpers/cache.ts           | 36 ++++++++++---------
 7 files changed, 62 insertions(+), 39 deletions(-)

diff --git a/.github/actions/run-interop-hole-punch-test/action.yml b/.github/actions/run-interop-hole-punch-test/action.yml
index f73647469..5ef607b06 100644
--- a/.github/actions/run-interop-hole-punch-test/action.yml
+++ b/.github/actions/run-interop-hole-punch-test/action.yml
@@ -86,6 +86,11 @@ runs:
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/actions/run-interop-ping-test/action.yml b/.github/actions/run-interop-ping-test/action.yml
index b351f1d17..434ed19d1 100644
--- a/.github/actions/run-interop-ping-test/action.yml
+++ b/.github/actions/run-interop-ping-test/action.yml
@@ -78,6 +78,11 @@ runs:
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/actions/run-transport-interop-test/action.yml b/.github/actions/run-transport-interop-test/action.yml
index 09eaaf33d..8122f4d34 100644
--- a/.github/actions/run-transport-interop-test/action.yml
+++ b/.github/actions/run-transport-interop-test/action.yml
@@ -78,6 +78,11 @@ runs:
     - name: Load cache and build
       working-directory: ${{ steps.find-workdir.outputs.WORK_DIR }}
+      env:
+        AWS_BUCKET: ${{ inputs.s3-cache-bucket }}
+        AWS_REGION: ${{ inputs.aws-region }}
+        AWS_ACCESS_KEY_ID: ${{ inputs.s3-access-key-id }}
+        AWS_SECRET_ACCESS_KEY: ${{ inputs.s3-secret-access-key }}
       run: npm run cache -- load
       shell: bash
diff --git a/.github/workflows/hole-punch-interop.yml b/.github/workflows/hole-punch-interop.yml
index 723cc2327..d306da2b7 100644
--- a/.github/workflows/hole-punch-interop.yml
+++ b/.github/workflows/hole-punch-interop.yml
@@ -28,7 +28,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-interop-hole-punch-test
         with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
diff --git a/.github/workflows/transport-interop.yml b/.github/workflows/transport-interop.yml
index 275ebb1bf..e5fcc6dc2 100644
--- a/.github/workflows/transport-interop.yml
+++ b/.github/workflows/transport-interop.yml
@@ -18,12 +18,12 @@ jobs:
       - uses: actions/checkout@v3
       - uses: ./.github/actions/run-transport-interop-test
        with:
-          s3-cache-bucket: libp2p-by-tf-aws-bootstrap
-          s3-access-key-id: ${{ vars.S3_AWS_ACCESS_KEY_ID }}
-          s3-secret-access-key: ${{ secrets.S3_AWS_SECRET_ACCESS_KEY }}
+          s3-cache-bucket: ${{ vars.S3_LIBP2P_BUILD_CACHE_BUCKET_NAME }}
+          s3-access-key-id: ${{ vars.S3_LIBP2P_BUILD_CACHE_AWS_ACCESS_KEY_ID }}
+          s3-secret-access-key: ${{ secrets.S3_LIBP2P_BUILD_CACHE_AWS_SECRET_ACCESS_KEY }}
           worker-count: 16
   build-without-secrets:
-    runs-on: ubuntu-latest
+    runs-on: ['self-hosted', 'linux', 'x64', '4xlarge'] # https://github.com/pl-strflt/tf-aws-gh-runner/blob/main/runners.tf
     steps:
       - uses: actions/checkout@v3
       # Purposely not using secrets to replicate how forks will behave.
diff --git a/hole-punch-interop/helpers/cache.ts b/hole-punch-interop/helpers/cache.ts
index a5d07fdf8..cb54ad4e0 100755
--- a/hole-punch-interop/helpers/cache.ts
+++ b/hole-punch-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;

 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -76,10 +77,14 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
     if (mode == Mode.PushCache) {
         console.log("Pushing cache")
         try {
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            try {
+                child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
                 console.log("Cache already exists")
-            } else {
+            } catch (e) {
+                console.log("Cache doesn't exist", e)
                 // Read image id from image.json
                 const imageID = JSON.parse(fs.readFileSync(path.join(dir, 'image.json')).toString()).imageID;
                 console.log(`Pushing cache for ${dir}: ${imageID}`)
@@ -96,18 +101,17 @@ async function loadCacheOrBuild(dir: string, ig: Ignore) {
         console.log("Loading cache")
         let cacheHit = false
         try {
-            // Check if the cache exists
-            const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, {method: "HEAD"})
-            if (res.ok) {
-                const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-                const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-                if (loadedImageId) {
-                    console.log(`Cache hit for ${loadedImageId}`);
-                    fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
-                    cacheHit = true
-                }
-            } else {
-                console.log("Cache not found")
+            if (!AWS_BUCKET) {
+                throw new Error("AWS_BUCKET not set")
+            }
+            const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+            const archivePath = path.join(cachePath, 'archive.tar.gz')
+            const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+            if (loadedImageId) {
+                console.log(`Cache hit for ${loadedImageId}`);
+                fs.writeFileSync(path.join(dir, 'image.json'), JSON.stringify({imageID: loadedImageId}) + "\n");
+                cacheHit = true
             }
         } catch (e) {
             console.log("Cache not found:", e)
diff --git a/transport-interop/helpers/cache.ts b/transport-interop/helpers/cache.ts
index 697e452fb..5504a0c0f 100755
--- a/transport-interop/helpers/cache.ts
+++ b/transport-interop/helpers/cache.ts
@@ -1,8 +1,9 @@
-const AWS_BUCKET = process.env.AWS_BUCKET || 'libp2p-by-tf-aws-bootstrap';
+const AWS_BUCKET = process.env.AWS_BUCKET;
 const scriptDir = __dirname;

 import * as crypto from 'crypto';
 import * as fs from 'fs';
+import * as os from 'os';
 import * as path from 'path';
 import * as child_process from 'child_process';
 import ignore, { Ignore } from 'ignore'
@@ -65,10 +66,14 @@ switch (modeStr) {
 if (mode == Mode.PushCache) {
     console.log("Pushing cache")
     try {
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        try {
+            child_process.execSync(`aws s3 ls s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`)
             console.log("Cache already exists")
-        } else {
+        } catch (e) {
+            console.log("Cache doesn't exist", e)
             // Read image id from image.json
             const imageID = JSON.parse(fs.readFileSync(path.join(implFolder, 'image.json')).toString()).imageID;
             console.log(`Pushing cache for ${impl}: ${imageID}`)
@@ -85,18 +90,17 @@
     console.log("Loading cache")
     let cacheHit = false
     try {
-        // Check if the cache exists
-        const res = await fetch(`https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz`, { method: "HEAD" })
-        if (res.ok) {
-            const dockerLoadedMsg = child_process.execSync(`curl https://s3.amazonaws.com/${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz | docker image load`).toString();
-            const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
-            if (loadedImageId) {
-                console.log(`Cache hit for ${loadedImageId}`);
-                fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
-                cacheHit = true
-            }
-        } else {
-            console.log("Cache not found")
+        if (!AWS_BUCKET) {
+            throw new Error("AWS_BUCKET not set")
+        }
+        const cachePath = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
+        const archivePath = path.join(cachePath, 'archive.tar.gz')
+        const dockerLoadedMsg = child_process.execSync(`aws s3 cp s3://${AWS_BUCKET}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`).toString();
+        const loadedImageId = dockerLoadedMsg.match(/Loaded image( ID)?: (.*)/)[2];
+        if (loadedImageId) {
+            console.log(`Cache hit for ${loadedImageId}`);
+            fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n");
+            cacheHit = true
         }
     } catch (e) {
         console.log("Cache not found:", e)
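The two cache.ts diffs above make the same change in both test suites: instead of probing and fetching the image archive over unauthenticated HTTPS, the scripts now require AWS_BUCKET to be set and drive all S3 access through the AWS CLI. Condensed into a single illustrative helper, the new load path looks like the sketch below. The function name and signature are not part of the patch; it assumes the AWS CLI is on the PATH and picks up AWS_REGION, AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY from the environment, which is exactly what the new `env:` blocks in the composite actions provide.

```ts
import * as fs from 'fs';
import * as os from 'os';
import * as path from 'path';
import * as child_process from 'child_process';

// Hypothetical helper distilled from the patch above: download the cached
// image archive with the AWS CLI and load it into the local Docker daemon.
// `bucket` corresponds to AWS_BUCKET; region and credentials are read from
// the environment by the AWS CLI itself.
function loadCachedImage(bucket: string | undefined, cacheKey: string, arch: string, implFolder: string): boolean {
    if (!bucket) {
        throw new Error("AWS_BUCKET not set")
    }
    // Download to a fresh temp dir, then load the archive into Docker.
    const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'cache'))
    const archivePath = path.join(tmpDir, 'archive.tar.gz')
    const output = child_process.execSync(
        `aws s3 cp s3://${bucket}/imageCache/${cacheKey}-${arch}.tar.gz ${archivePath} && docker image load -i ${archivePath}`
    ).toString()
    // `docker image load` prints either "Loaded image: <tag>" or "Loaded image ID: <sha256>".
    const loadedImageId = output.match(/Loaded image( ID)?: (.*)/)?.[2]
    if (loadedImageId) {
        // Persist the id so later steps can reference the cached image.
        fs.writeFileSync(path.join(implFolder, 'image.json'), JSON.stringify({ imageID: loadedImageId }) + "\n")
        return true
    }
    return false
}
```

Routing both reads and writes through one authenticated entry point is what the "pass creds when reading cache" and "interact with aws only using the CLI" commits describe: the same credentials now cover the whole flow, where the previous mix of fetch and curl could only read public objects.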
From c164b240492dc9f818af37db2edbcaa79b97ef70 Mon Sep 17 00:00:00 2001
From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com>
Date: Wed, 11 Sep 2024 08:25:10 +0000
Subject: [PATCH 3/3] chore: update the link to the interop dashboard [skip ci]

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 238d0bff6..f19fbeafc 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 # Interoperability/end to end test-plans & performance benchmarking for libp2p

-[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20transport%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/10461977895/attempts/1#summary-28971362346)
+[![Interop Dashboard](https://github.com/libp2p/test-plans/workflows/libp2p%20transport%20interop%20test/badge.svg?branch=master)](https://github.com/libp2p/test-plans/actions/runs/10807493695/attempts/1#summary-29978411046)

 [![Made by Protocol Labs](https://img.shields.io/badge/made%20by-Protocol%20Labs-blue.svg?style=flat-square)](http://protocol.ai)
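On the push side, patch 2 swaps the unauthenticated HEAD request for an `aws s3 ls` probe: the CLI exits non-zero when the key is absent, execSync turns that into a thrown error, and the catch branch becomes the build-and-push path. A minimal sketch of that probe, with an illustrative helper name that is not part of the patch:

```ts
import * as child_process from 'child_process';

// Sketch of the existence check introduced in patch 2. `aws s3 ls` exits
// non-zero when the key is missing, which execSync surfaces as a thrown
// error, so callers treat the catch branch as "cache miss: build and push".
function cacheArchiveExists(bucket: string, cacheKey: string, arch: string): boolean {
    try {
        child_process.execSync(`aws s3 ls s3://${bucket}/imageCache/${cacheKey}-${arch}.tar.gz`)
        return true
    } catch {
        return false
    }
}
```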