chore: various fixes to get e2e tests working for account service (#502)
# Problem

After the move to the mono-repo, the e2e tests no longer work because the
scripts that execute them reference outdated file locations and need to be updated.

# Solution

Update the e2e test workflow so that the tests can execute locally.

## Steps to Verify:

1. Follow the steps in `./developer-docs/account/README.md` and verify
that the tests execute correctly (a minimal command sketch follows below).
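
For reference, a minimal sketch of the local run for the account service, pieced together from the steps in the new `e2e-tests.yml` workflow below (compose files, setup-script path, and npm script names come from the diff; adjust to your environment):

```bash
# Install workspace dependencies (the workflow runs this at the repo root)
npm ci

# Start Frequency plus the account e2e stack
docker compose -f docker-compose.yaml -f docker-compose-e2e.account.yaml --profile e2e up -d
sleep 15  # give the chain container time to come up

# Seed a provider and capacity (setup-script location taken from the workflow)
(cd apps/account-api/test/setup && npm ci && npm run main)

# Run the account e2e suite, then tear the stack down
npm run test:e2e:account
docker compose -f docker-compose.yaml -f docker-compose-e2e.account.yaml --profile e2e down
```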

---------

Co-authored-by: Joe Caputo <[email protected]>
mattheworris and JoeCap08055 authored Sep 16, 2024
1 parent ddba0b2 commit cfdf57c
Showing 23 changed files with 6,029 additions and 2,956 deletions.
102 changes: 102 additions & 0 deletions .github/workflows/e2e-tests.yml
@@ -0,0 +1,102 @@
name: E2E Tests

on:
pull_request:
branches:
- main
push:
branches:
- main

jobs:
service-matrix:
runs-on: ubuntu-latest
outputs:
changes: ${{ steps.determine-matrix.outputs.changes }}
# NOTE: only account E2E tests enabled in pipeline currently; add other services as they are updated
# IF you add a new filter it ALSO needs to be here
services: >-
["account"]
# Resolves to true if it should run everything, aka when common files change
run-all: ${{ steps.determine-matrix.outputs.changes.common }}
steps:
- name: Check Out Repo
uses: actions/checkout@v4
- uses: dorny/paths-filter@v3
id: determine-matrix
with:
# Adding a filter? Check for the need to add to the outputs as well
filters: |
common:
- 'Docker/**'
- 'tools/ci-k6/**'
- '.github/**'
- 'docker-compose.yaml'
- 'docker-compose-e2e.*.yaml'
- 'libs/types'
account:
- 'apps/account-api/**'
- 'apps/account-worker/**'
- 'libs/account-lib/**'
graph:
- 'apps/graph-api/**'
- 'apps/graph-worker/**'
- 'libs/graph-lib/**'
content-publishing:
- 'apps/content-publishing-api/**'
- 'apps/content-publishing-worker/**'
- 'libs/content-publishing-lib/**'
content-watcher:
- 'apps/content-watcher/**'
- 'libs/content-watcher-lib/**'
build:
name: '[${{ matrix.service }}] E2E Tests'
runs-on: ubuntu-latest
needs: service-matrix
strategy:
fail-fast: false
matrix:
service: ${{ fromJson(needs.service-matrix.outputs.services) }}
steps:
- name: Run or Skip
id: should
run: echo "RUN=${{ needs.service-matrix.outputs.run-all || contains(fromJson(needs.service-matrix.outputs.changes), matrix.service) }}" >> "$GITHUB_OUTPUT"

- name: Checkout
if: ${{ steps.should.outputs.RUN == 'true' }}
uses: actions/checkout@v4

- name: Install Node.js
if: ${{ steps.should.outputs.RUN == 'true' }}
uses: actions/setup-node@v4
with:
node-version: 20.16.0
cache: 'npm'
registry-url: 'https://registry.npmjs.org'
cache-dependency-path: tools/ci-k6/package-lock.json

- name: Install dependencies
if: ${{ steps.should.outputs.RUN == 'true' }}
run: npm ci

- name: Start Frequency
if: ${{ steps.should.outputs.RUN == 'true' }}
# NOTE: the 'sleep' below can be shortened or possibly eliminated once we switch from the 'dsnp/instant-seal-node-with-deployed-schemas'
# to the 'frequencychain/standalone-node' with schemas in the genesis block (after Frequency 1.13 is deployed to Mainnet & the image is updated)
run: |
docker compose -f docker-compose.yaml -f docker-compose-e2e.${{ matrix.service }}.yaml --profile e2e up -d
sleep 15
# TODO: make a service-agnostic setup script, or make service-specific setup scripts all in one place
- name: Generate Provider and Capacity
if: ${{ steps.should.outputs.RUN == 'true' }}
working-directory: apps/account-api/test/setup
run: npm ci && npm run main

- name: Run E2E Tests
if: ${{ steps.should.outputs.RUN == 'true' }}
run: npm run test:e2e:${{ matrix.service }}
- name: Stop Docker Compose
if: ${{ steps.should.outputs.RUN == 'true' || failure() }}
run: docker compose -f docker-compose.yaml -f docker-compose-e2e.${{ matrix.service }}.yaml --profile e2e down
17 changes: 2 additions & 15 deletions .github/workflows/load-tests.yml
@@ -78,25 +78,12 @@ jobs:
working-directory: tools/ci-k6
run: npm ci

- name: Set up Docker Buildx
if: ${{ steps.should.outputs.RUN == 'true' }}
uses: docker/setup-buildx-action@v3
# Use GitHub Container Registry instead due to rate limits
with:
buildkitd-config-inline: |
[registry."ghcr.io"]
- name: Login to DockerHub
if: ${{ steps.should.outputs.RUN == 'true' }}
uses: docker/login-action@v3
with:
username: ${{secrets.DOCKERHUB_USERNAME}}
password: ${{secrets.DOCKERHUB_TOKEN}}

- name: Start Frequency
if: ${{ steps.should.outputs.RUN == 'true' }}
run: |
docker compose -f docker-compose.yaml -f docker-compose-k6.${{ matrix.service }}.yaml up -d frequency
sleep 5
sleep 15
- name: Generate Provider and Capacity
if: ${{ steps.should.outputs.RUN == 'true' }}
working-directory: tools/ci-k6
1 change: 1 addition & 0 deletions .prettierignore
@@ -11,4 +11,5 @@ services/*/apps/api/src/metadata.ts
services/*/docs/index.html
**/metadata.ts
openapi-specs/*
**/target/**
jest.config.json
38 changes: 38 additions & 0 deletions Docker/Dockerfile.mock-webhook-server
@@ -0,0 +1,38 @@
# Stage 1: Build the application using static linking with the GNU toolchain
FROM rust:bullseye AS builder

# Set the working directory inside the container
WORKDIR /app

# Copy the Cargo.toml and Cargo.lock files to cache dependencies
COPY rust-webhook-server/Cargo.toml rust-webhook-server/Cargo.lock ./

# Create a dummy main.rs to allow Cargo to fetch dependencies
RUN mkdir src && echo "fn main() {}" > src/main.rs

# Fetch dependencies
RUN cargo build --release && rm -rf src target/release/rust-webhook-server

# Copy the rest of the application source code
COPY rust-webhook-server/ .
RUN touch src/main.rs

# Build the actual Rust application, enabling static linking for the standard library
RUN cargo build --release

# Stage 2: Use a minimal base image to run the application
FROM debian:bullseye

RUN apt-get update & apt-get install -y extra-runtime-dependencies & rm -rf /var/lib/apt/lists/*

# Set the working directory inside the container
WORKDIR /app

# Copy the statically linked binary from the builder stage
COPY --from=builder /app/target/release/rust-webhook-server .

# Expose the port your application will run on (adjust as necessary)
EXPOSE 3001

# Run the binary
CMD ["./rust-webhook-server"]
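
A possible way to exercise this image locally — a sketch only; the tag and host port are illustrative, and it assumes the build context contains the `rust-webhook-server/` sources referenced by the `COPY` steps above:

```bash
# Build the mock webhook server image (context must include rust-webhook-server/)
docker build -f Docker/Dockerfile.mock-webhook-server -t mock-webhook-server .

# Run it, publishing the port the Dockerfile exposes
docker run --rm -p 3001:3001 mock-webhook-server
```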
8 changes: 4 additions & 4 deletions Makefile
@@ -27,8 +27,8 @@ generate-openapi:
generate-swagger-ui:
@npm run generate:swagger-ui

test: $(TEST_TARGETS)
# test-e2e: $(E2E_TARGETS)
.PHONY test: $(TEST_TARGETS)
test-e2e: $(E2E_TARGETS)
# test-k6: $(K6_TARGETS)

lint:
@@ -54,8 +54,8 @@ $(SWAGGER_TARGETS): swagger-%: openapi-%
$(TEST_TARGETS):
@npm run test:$(@:test-%=%)

# $(E2E_TARGETS):
# @( cd apps/$(@:test-e2e-%=%) ; npm run test:e2e )
$(E2E_TARGETS):
@npm run test:e2e:$(@:test-e2e-%=%)

# $(K6_TARGETS):
# @( cd apps/$(@:test-k6-%=%) ; npm run test:k6 )
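
With the e2e targets re-enabled, each per-service target should delegate to the matching npm script — a hedged example, assuming `test-e2e-account` is among the generated `E2E_TARGETS`:

```bash
# Expands via the pattern rule above to: npm run test:e2e:account
make test-e2e-account
```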
8 changes: 4 additions & 4 deletions README.md
@@ -68,31 +68,31 @@ Each Gateway service is an independent microservice.
<summary>Account Service</summary>

- [API Documentation](https://projectlibertylabs.github.io/gateway/account)
- [README](./services/account/README.md)
- [README](./developer-docs/account/README.md)

</details>

<details>
<summary>Graph Service</summary>

- [API Documentation](https://projectlibertylabs.github.io/gateway/graph/)
- [README](./services/graph/README.md)
- [README](./developer-docs/graph/README.md)

</details>

<details>
<summary>Content Publishing Service</summary>

- [API Documentation](https://projectlibertylabs.github.io/gateway/content-publishing/)
- [README](./services/content-publishing/README.md)
- [README](./developer-docs/content-publishing/README.md)

</details>

<details>
<summary>Content Watcher Service</summary>

- [API Documentation](https://projectlibertylabs.github.io/gateway/content-watcher/)
- [README](./services/content-watcher/README.md)
- [README](./developer-docs/content-watcher/README.md)

</details>

108 changes: 87 additions & 21 deletions apps/account-api/test/accounts.controller.e2e-spec.ts
@@ -8,6 +8,8 @@ import { ChainUser, ExtrinsicHelper, getClaimHandlePayload } from '@projectliber
import { uniqueNamesGenerator, colors, names } from 'unique-names-generator';
import { ApiModule } from '../src/api.module';
import { setupProviderAndUsers } from './e2e-setup.mock.spec';
import { WalletLoginRequestDto } from '#account-lib/types/dtos';
import { CacheMonitorService } from '#account-lib/cache/cache-monitor.service';

describe('Account Controller', () => {
let app: INestApplication;
Expand All @@ -16,9 +18,11 @@ describe('Account Controller', () => {
let users: ChainUser[];
let provider: ChainUser;
let maxMsaId: string;
let httpServer: any;

const handle = uniqueNamesGenerator({ dictionaries: [colors, names], separator: '', length: 2, style: 'capital' });

beforeEach(async () => {
beforeAll(async () => {
({ currentBlockNumber, maxMsaId, provider, users } = await setupProviderAndUsers());

const handlePayload = getClaimHandlePayload(users[0], handle, currentBlockNumber);
@@ -58,6 +62,12 @@ app.useGlobalPipes(new ValidationPipe());
app.useGlobalPipes(new ValidationPipe());
app.enableShutdownHooks();
await app.init();

httpServer = app.getHttpServer();

// Redis timeout keeping test suite alive for too long; disable
const cacheMonitor = app.get<CacheMonitorService>(CacheMonitorService);
cacheMonitor.startConnectionTimer = jest.fn();
});

afterAll(async () => {
@@ -68,31 +78,87 @@
// do nothing
console.error(e);
}
});

it('(GET) /v1/accounts/:msaId with valid msaId and no handle', async () => {
const user = users[2];
const validMsaId = user.msaId?.toString();
await request(app.getHttpServer()).get(`/v1/accounts/${validMsaId}`).expect(200).expect({
msaId: user.msaId?.toString(),
await ExtrinsicHelper.disconnect();
await app.close();
await httpServer.close();

// Wait for some pending async stuff to finish
await new Promise<void>((resolve) => {
setTimeout(() => resolve(), 1000);
});
});

it('(GET) /v1/accounts/:msaId with invalid msaId', async () => {
const invalidMsaId = BigInt(maxMsaId) + 1000n;
await request(app.getHttpServer())
.get(`/v1/accounts/${invalidMsaId.toString()}`)
.expect(400)
.expect({ statusCode: 400, message: 'Failed to find the account' });
describe('(GET) /accounts', () => {
it('(GET) /v1/accounts/:msaId with valid msaId and no handle', async () => {
const user = users[2];
const validMsaId = user.msaId?.toString();
const { body } = await request(httpServer).get(`/v1/accounts/${validMsaId}`).expect(200).expect({
msaId: user.msaId?.toString(),
});
expect(body).not.toContain('handle');
});

it('(GET) /v1/accounts/:msaId with invalid msaId', async () => {
const invalidMsaId = BigInt(maxMsaId) + 1000n;
await request(httpServer)
.get(`/v1/accounts/${invalidMsaId.toString()}`)
.expect(404)
.expect({ statusCode: 404, message: 'Failed to find the account' });
});

it('(GET) /v1/accounts/:msaId with valid msaId and handle', async () => {
const user = users[0];
const validMsaId = user.msaId?.toString();
await request(httpServer)
.get(`/v1/accounts/${validMsaId}`)
.expect(200)
.expect((res) => res.body.msaId === validMsaId)
.expect((res) => res.body.handle.base_handle === handle);
});
});

it('(GET) /v1/accounts/:msaId with valid msaId and handle', async () => {
const user = users[0];
const validMsaId = user.msaId?.toString();
await request(app.getHttpServer())
.get(`/v1/accounts/${validMsaId}`)
.expect(200)
.expect((res) => res.body.msaId === validMsaId)
.expect((res) => res.body.handle.base_handle === handle);
describe('(POST) /accounts/siwf', () => {
it('Sign Up With Frequency request should work', async () => {
const siwfRequest: WalletLoginRequestDto = {
signIn: {},
signUp: {
extrinsics: [
{
pallet: 'msa',
extrinsicName: 'createSponsoredAccountWithDelegation',
encodedExtrinsic:
'0xed01043c01b01b4dcafc8a8e73bff98e7558249f53cd0e0e64fa6b8f0159f0913d4874d9360176644186458bad3b00bbd0ac21e6c9bd5a8bed9ced7a772d11a9aac025b47f6559468808e272696f596a02af230951861027c0dc30f7163ecf316838a0723483010000000000000014000000000000000000004d000000',
},
{
pallet: 'handles',
extrinsicName: 'claimHandle',
encodedExtrinsic:
'0xb901044200b01b4dcafc8a8e73bff98e7558249f53cd0e0e64fa6b8f0159f0913d4874d93601225508ae2da9804c60660a150277eb32b2a0f6b9c8f6e07dd6cad799cb31ae1dfb43896f488e9c0b7ec8b530d930b3f9b690683f2765d5def3fee3fc6540d58714656e6464794d000000',
},
],
},
};

await request(httpServer).post(`/v1/accounts/siwf`).send(siwfRequest).expect(201);
});

it('Sign In With Frequency request should work', (done) => {
const siwfRequest: WalletLoginRequestDto = {
signIn: {
siwsPayload: {
message:
'localhost wants you to sign in with your Frequency account:\n5Fghb4Wt3sg9cF6Q2Qucp5jXV5pL2U9uaYXwR9R8W8SYe9np\n\nThe domain localhost wants you to sign in with your Frequency account via localhost\n\nURI: http://localhost:5173/signin/confirm\nNonce: N6rLwqyz34oUxJEXJ\nIssued At: 2024-03-05T23:18:03.041Z\nExpiration Time: 2024-03-05T23:23:03.041Z',
signature:
'0x38faa2fc6f59bef8ffccfc929fb966e1d53ba45e3af7a029ea1d636eaddcbe78a4be0f89eaf7ff7bbaef20a070ad65f9d0f876889686687ef623214fddddb18b',
},
},
signUp: {
extrinsics: [],
},
};

request(httpServer).post(`/v1/accounts/siwf`).send(siwfRequest).expect(201).end(done);
});
});
});