diff --git a/.gitignore b/.gitignore
index e81e4170d..da3053aa9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -11,4 +11,5 @@ coverage
 out
 local.json
 globalConfig.json
-.serverless
\ No newline at end of file
+.serverless
+dist
\ No newline at end of file
diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced
index 3afe8e287..73a97287f 100644
--- a/Dockerfile.enhanced
+++ b/Dockerfile.enhanced
@@ -4,7 +4,7 @@ WORKDIR /home/docsworker-xlarge
 COPY config config/
 COPY package*.json ./
 COPY tsconfig*.json ./
-RUN npm ci
+RUN npm ci --legacy-peer-deps
 
 COPY . ./
 RUN npm run build
diff --git a/cdk-infra/bin/cdk-infra.ts b/cdk-infra/bin/cdk-infra.ts
index 6ce0732b8..1f9f4f4a2 100644
--- a/cdk-infra/bin/cdk-infra.ts
+++ b/cdk-infra/bin/cdk-infra.ts
@@ -1,9 +1,11 @@
 #!/usr/bin/env node
 import 'source-map-support/register';
 import * as cdk from 'aws-cdk-lib';
-import { AutoBuilderStack } from '../lib/auto-builder-stack';
 import { getSsmPathPrefix, getWebhookSecureStrings, getWorkerSecureStrings } from '../utils/ssm';
 import { getFeatureName, initContextVars } from '../utils/env';
+import { AutoBuilderQueueStack } from '../lib/stacks/auto-builder-queue-stack';
+import { WorkerStack } from '../lib/stacks/worker-stack';
+import { WebhookStack } from '../lib/stacks/webhook-stack';
 
 async function main() {
   const app = new cdk.App();
@@ -15,6 +17,8 @@ async function main() {
 
   const ssmPrefix = getSsmPathPrefix();
 
+  const env = { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION };
+
   // Constructors can't be async, so since I am doing this workaround for the secure strings,
   // they need to be retrieved before we create the stack.
   const workerSecureStrings = await getWorkerSecureStrings(ssmPrefix);
@@ -22,22 +26,13 @@ async function main() {
 
   const stackName = `auto-builder-stack-${getFeatureName()}`;
 
-  new AutoBuilderStack(app, stackName, {
-    /* If you don't specify 'env', this stack will be environment-agnostic.
-     * Account/Region-dependent features and context lookups will not work,
-     * but a single synthesized template can be deployed anywhere. */
-    /* Uncomment the next line to specialize this stack for the AWS Account
-     * and Region that are implied by the current CLI configuration. */
-    // env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION },
-    /* Uncomment the next line if you know exactly what Account and Region you
-     * want to deploy the stack to. */
-    env: { account: process.env.CDK_DEFAULT_ACCOUNT, region: process.env.CDK_DEFAULT_REGION },
-    workerSecureStrings,
+  const queues = new AutoBuilderQueueStack(app, `${stackName}-queues`, { env });
+  const { clusterName } = new WorkerStack(app, `${stackName}-worker`, { queues, workerSecureStrings, env });
+  new WebhookStack(app, `${stackName}-webhooks`, {
+    queues,
+    clusterName,
     webhookSecureStrings,
-    tags: {
-      stackName,
-    },
-    /* For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html */
+    env,
   });
 }
 
diff --git a/cdk-infra/cdk.json b/cdk-infra/cdk.json
index 4dbaa9f68..ac87ae383 100644
--- a/cdk-infra/cdk.json
+++ b/cdk-infra/cdk.json
@@ -12,6 +12,7 @@
       "yarn.lock",
       "node_modules",
       "dist",
+      "cdk.out",
       "test"
     ]
   },
diff --git a/cdk-infra/lib/constructs/api/webhook-env-construct.ts b/cdk-infra/lib/constructs/api/webhook-env-construct.ts
index f7e8231c9..e32ac3bf9 100644
--- a/cdk-infra/lib/constructs/api/webhook-env-construct.ts
+++ b/cdk-infra/lib/constructs/api/webhook-env-construct.ts
@@ -32,6 +32,7 @@ export class WebhookEnvConstruct extends Construct {
       NODE_CONFIG_DIR: './config',
       JOBS_QUEUE_URL: jobsQueue.queueUrl,
       JOB_UPDATES_QUEUE_URL: jobUpdatesQueue.queueUrl,
+      NODE_OPTIONS: '--enable-source-maps',
     };
   }
 }
diff --git a/cdk-infra/lib/constructs/worker/worker-construct.ts b/cdk-infra/lib/constructs/worker/worker-construct.ts
index ff66f155d..d0503b841 100644
--- a/cdk-infra/lib/constructs/worker/worker-construct.ts
+++ b/cdk-infra/lib/constructs/worker/worker-construct.ts
@@ -15,7 +15,7 @@ import path from 'path';
 import { isEnhanced } from '../../../utils/env';
 
 interface WorkerConstructProps {
-  environment: Record<string, string>;
+  dockerEnvironment: Record<string, string>;
   jobsQueue: IQueue;
   jobUpdatesQueue: IQueue;
 }
@@ -23,7 +23,7 @@ export class WorkerConstruct extends Construct {
   readonly ecsTaskRole: IRole;
   readonly clusterName: string;
 
-  constructor(scope: Construct, id: string, { environment, jobsQueue, jobUpdatesQueue }: WorkerConstructProps) {
+  constructor(scope: Construct, id: string, { dockerEnvironment, jobsQueue, jobUpdatesQueue }: WorkerConstructProps) {
     super(scope, id);
 
     const vpc = new Vpc(this, 'vpc', {
@@ -76,8 +76,8 @@
     const containerProps: AssetImageProps = {
       file: isEnhanced() ? 'Dockerfile.enhanced' : undefined,
       buildArgs: {
-        NPM_BASE_64_AUTH: environment.NPM_BASE_64_AUTH,
-        NPM_EMAIL: environment.NPM_EMAIL,
+        NPM_BASE_64_AUTH: dockerEnvironment.NPM_BASE_64_AUTH,
+        NPM_EMAIL: dockerEnvironment.NPM_EMAIL,
       },
     };
 
@@ -89,9 +89,22 @@
       executionRole,
     });
 
+    const updateTaskProtectionPolicy = new PolicyStatement({
+      effect: Effect.ALLOW,
+      actions: ['ecs:UpdateTaskProtection'],
+      conditions: {
+        ArnEquals: {
+          'ecs:cluster': cluster.clusterArn,
+        },
+      },
+      resources: ['*'],
+    });
+
+    taskRole.addToPolicy(updateTaskProtectionPolicy);
+
     taskDefinition.addContainer('workerImage', {
       image: ContainerImage.fromAsset(path.join(__dirname, '../../../../'), containerProps),
-      environment,
+      environment: dockerEnvironment,
       logging: LogDrivers.awsLogs({
         streamPrefix: 'autobuilderworker',
         logGroup: taskDefLogGroup,
@@ -103,6 +116,7 @@
       taskDefinition,
       desiredCount: 5,
       minHealthyPercent: 100,
+      maxHealthyPercent: 200,
     });
 
     this.clusterName = cluster.clusterName;
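The `ecs:UpdateTaskProtection` statement above is what allows a running worker task to shield itself from scale-in and rolling-deploy termination; the `ecs:cluster` condition key pins the permission to this cluster, and `maxHealthyPercent: 200` gives the service headroom to start replacement tasks while protected ones finish their jobs. The PR exercises this permission through the ECS agent endpoint (see `src/enhanced/utils/job/task-management.ts` further down). Purely as an illustrative sketch, the same call could also be made with the AWS SDK — the cluster and task ARNs here are hypothetical placeholders, not values from this repo:

```ts
import { ECSClient, UpdateTaskProtectionCommand } from '@aws-sdk/client-ecs';

// Sketch: mark the current task as protected so a deploy or scale-in won't stop it mid-job.
// Assumes the task role carries the ecs:UpdateTaskProtection policy added above.
async function protectCurrentTask(clusterArn: string, taskArn: string): Promise<void> {
  const ecs = new ECSClient({});
  await ecs.send(
    new UpdateTaskProtectionCommand({
      cluster: clusterArn,
      tasks: [taskArn],
      protectionEnabled: true,
      // Optional safety valve so protection cannot outlive a stuck job indefinitely.
      expiresInMinutes: 60,
    })
  );
}
```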
diff --git a/cdk-infra/lib/stacks/auto-builder-queue-stack.ts b/cdk-infra/lib/stacks/auto-builder-queue-stack.ts
new file mode 100644
index 000000000..7f067433e
--- /dev/null
+++ b/cdk-infra/lib/stacks/auto-builder-queue-stack.ts
@@ -0,0 +1,22 @@
+import { Stack, StackProps } from 'aws-cdk-lib';
+import { Construct } from 'constructs';
+import { AutoBuilderQueuesConstruct } from '../constructs/queue/queues-construct';
+import { IQueue } from 'aws-cdk-lib/aws-sqs';
+
+export interface AutoBuilderQueues {
+  jobsQueue: IQueue;
+  jobUpdatesQueue: IQueue;
+}
+
+export class AutoBuilderQueueStack extends Stack {
+  public readonly jobUpdatesQueue: IQueue;
+  public readonly jobsQueue: IQueue;
+  constructor(scope: Construct, id: string, props?: StackProps) {
+    super(scope, id, props);
+
+    const { jobUpdatesQueue, jobsQueue } = new AutoBuilderQueuesConstruct(this, 'queues');
+
+    this.jobUpdatesQueue = jobUpdatesQueue;
+    this.jobsQueue = jobsQueue;
+  }
+}
diff --git a/cdk-infra/lib/auto-builder-stack.ts b/cdk-infra/lib/stacks/auto-builder-stack.ts
similarity index 71%
rename from cdk-infra/lib/auto-builder-stack.ts
rename to cdk-infra/lib/stacks/auto-builder-stack.ts
index 4af88d8f6..157232699 100644
--- a/cdk-infra/lib/auto-builder-stack.ts
+++ b/cdk-infra/lib/stacks/auto-builder-stack.ts
@@ -1,12 +1,12 @@
 import { Stack, StackProps } from 'aws-cdk-lib';
 import { Construct } from 'constructs';
-import { WebhookApiConstruct } from './constructs/api/webhook-api-construct';
-import { WebhookEnvConstruct } from './constructs/api/webhook-env-construct';
-import { AutoBuilderQueuesConstruct } from './constructs/queue/queues-construct';
-import { WorkerBucketsConstruct } from './constructs/worker/buckets-construct';
-import { WorkerConstruct } from './constructs/worker/worker-construct';
-import { WorkerEnvConstruct } from './constructs/worker/worker-env-construct';
+import { WebhookApiConstruct } from '../constructs/api/webhook-api-construct';
+import { WebhookEnvConstruct } from '../constructs/api/webhook-env-construct';
+import { AutoBuilderQueuesConstruct } from '../constructs/queue/queues-construct';
+import { WorkerBucketsConstruct } from '../constructs/worker/buckets-construct';
+import { WorkerConstruct } from '../constructs/worker/worker-construct';
+import { WorkerEnvConstruct } from '../constructs/worker/worker-env-construct';
 
 interface AutoBuilderStackProps extends StackProps {
   workerSecureStrings: Record<string, string>;
@@ -32,7 +32,7 @@ export class AutoBuilderStack extends Stack {
     });
 
     const { clusterName, ecsTaskRole } = new WorkerConstruct(this, 'worker', {
-      environment: workerEnvironment,
+      dockerEnvironment: workerEnvironment,
       ...queues,
     });
 
diff --git a/cdk-infra/lib/stacks/webhook-stack.ts b/cdk-infra/lib/stacks/webhook-stack.ts
new file mode 100644
index 000000000..41367a41b
--- /dev/null
+++ b/cdk-infra/lib/stacks/webhook-stack.ts
@@ -0,0 +1,30 @@
+import { Stack, StackProps } from 'aws-cdk-lib';
+import { Construct } from 'constructs';
+import { AutoBuilderQueues } from './auto-builder-queue-stack';
+import { WebhookApiConstruct } from '../constructs/api/webhook-api-construct';
+import { WebhookEnvConstruct } from '../constructs/api/webhook-env-construct';
+
+interface WebhookStackProps extends StackProps {
+  webhookSecureStrings: Record<string, string>;
+  queues: AutoBuilderQueues;
+  clusterName: string;
+}
+export class WebhookStack extends Stack {
+  constructor(
+    scope: Construct,
+    id: string,
+    { queues, webhookSecureStrings, clusterName, ...props }: WebhookStackProps
+  ) {
+    super(scope, id, props);
+
+    const { environment: webhookEnvironment } = new WebhookEnvConstruct(this, 'ssmVars', {
+      ...queues,
+      secureStrings: webhookSecureStrings,
+    });
+
+    new WebhookApiConstruct(this, 'api', {
+      ...queues,
+      environment: { ...webhookEnvironment, TASK_DEFINITION_FAMILY: clusterName },
+    });
+  }
+}
diff --git a/cdk-infra/lib/stacks/worker-stack.ts b/cdk-infra/lib/stacks/worker-stack.ts
new file mode 100644
index 000000000..9b3dcaefd
--- /dev/null
+++ b/cdk-infra/lib/stacks/worker-stack.ts
@@ -0,0 +1,36 @@
+import { Stack, StackProps } from 'aws-cdk-lib';
+import { Construct } from 'constructs';
+import { WorkerConstruct } from '../constructs/worker/worker-construct';
+import { WorkerEnvConstruct } from '../constructs/worker/worker-env-construct';
+import { WorkerBucketsConstruct } from '../constructs/worker/buckets-construct';
+import { AutoBuilderQueues } from './auto-builder-queue-stack';
+
+interface WorkerStackProps extends StackProps {
+  workerSecureStrings: Record<string, string>;
+  queues: AutoBuilderQueues;
+}
+
+export class WorkerStack extends Stack {
+  public readonly clusterName: string;
+
+  constructor(scope: Construct, id: string, { queues, workerSecureStrings, ...props }: WorkerStackProps) {
+    super(scope, id, props);
+
+    const { environment } = new WorkerEnvConstruct(this, 'workerSsmVars', {
+      ...queues,
+      secureStrings: workerSecureStrings,
+    });
+
+    const { clusterName, ecsTaskRole } = new WorkerConstruct(this, 'worker', {
+      dockerEnvironment: environment,
+      ...queues,
+    });
+    const { buckets } = new WorkerBucketsConstruct(this, 'workerBuckets');
+
+    buckets.forEach((bucket) => {
+      bucket.grantReadWrite(ecsTaskRole);
+    });
+
+    this.clusterName = clusterName;
+  }
+}
diff --git a/cdk-infra/package-lock.json b/cdk-infra/package-lock.json
index 3ad2929ce..d2462491b 100644
--- a/cdk-infra/package-lock.json
+++ b/cdk-infra/package-lock.json
@@ -17,11 +17,14 @@
         "cdk-infra": "dist/bin/cdk-infra.js"
       },
       "devDependencies": {
+        "@swc/core": "^1.3.66",
+        "@swc/helpers": "^0.5.1",
         "@types/jest": "^29.4.0",
         "@types/node": "18.14.6",
         "aws-cdk": "2.73.0",
         "esbuild": "^0.18.3",
         "jest": "^29.5.0",
+        "regenerator-runtime": "^0.13.11",
         "ts-jest": "^29.0.5",
         "ts-node": "^10.9.1",
         "typescript": "~4.9.5"
@@ -2388,13 +2391,11 @@
       }
     },
     "node_modules/@swc/core": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.64.tgz",
-      "integrity": "sha512-be1dk2pfjzBjFp/+p47/wvOAm7KpEtsi7hqI3ofox6pK3hBJChHgVTLVV9xqZm7CnYdyYYw3Z78hH6lrwutxXQ==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core/-/core-1.3.66.tgz",
+      "integrity": "sha512-Hpf91kH5ly7fHkWnApwryTQryT+TO4kMMPH3WyciUSQOWLE3UuQz1PtETHQQk7PZ/b1QF0qQurJrgfBr5bSKUA==",
       "dev": true,
       "hasInstallScript": true,
-      "optional": true,
-      "peer": true,
       "engines": {
         "node": ">=10"
       },
@@ -2403,16 +2404,16 @@
         "url": "https://opencollective.com/swc"
       },
       "optionalDependencies": {
-        "@swc/core-darwin-arm64": "1.3.64",
-        "@swc/core-darwin-x64": "1.3.64",
-        "@swc/core-linux-arm-gnueabihf": "1.3.64",
-        "@swc/core-linux-arm64-gnu": "1.3.64",
-        "@swc/core-linux-arm64-musl": "1.3.64",
-        "@swc/core-linux-x64-gnu": "1.3.64",
-        "@swc/core-linux-x64-musl": "1.3.64",
-        "@swc/core-win32-arm64-msvc": "1.3.64",
-        "@swc/core-win32-ia32-msvc": "1.3.64",
-        "@swc/core-win32-x64-msvc": "1.3.64"
+        "@swc/core-darwin-arm64": "1.3.66",
+        "@swc/core-darwin-x64": "1.3.66",
+        "@swc/core-linux-arm-gnueabihf": "1.3.66",
+        "@swc/core-linux-arm64-gnu": "1.3.66",
+        "@swc/core-linux-arm64-musl": "1.3.66",
+        "@swc/core-linux-x64-gnu": "1.3.66",
+        "@swc/core-linux-x64-musl": "1.3.66",
+        "@swc/core-win32-arm64-msvc": "1.3.66",
+        "@swc/core-win32-ia32-msvc": "1.3.66",
+        "@swc/core-win32-x64-msvc": "1.3.66"
       },
       "peerDependencies": {
         "@swc/helpers": "^0.5.0"
@@ -2424,9 +2425,9 @@
       }
     },
    "node_modules/@swc/core-darwin-arm64": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.64.tgz",
-      "integrity": "sha512-gSPld6wxZBZoEvZXWmNfd+eJGlGvrEXmhMBCUwSccpuMa0KqK7F6AAZVu7kFkmlXPq2kS8owjk6/VXnVBmm5Vw==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-darwin-arm64/-/core-darwin-arm64-1.3.66.tgz",
+      "integrity": "sha512-UijJsvuLy73vxeVYEy7urIHksXS+3BdvJ9s9AY+bRMSQW483NO7RLp8g4FdTyJbRaN0BH15SQnY0dcjQBkVuHw==",
       "cpu": [
         "arm64"
       ],
@@ -2435,15 +2436,14 @@
       "os": [
         "darwin"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-darwin-x64": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.64.tgz",
-      "integrity": "sha512-SJd1pr+U2pz5ZVv5BL36CN879Pn1V0014uVNlB+6yNh3e8T0fjUbtRJcbFiBB+OeYuJ1UNUeslaRJtKJNtMH7A==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-darwin-x64/-/core-darwin-x64-1.3.66.tgz",
+      "integrity": "sha512-xGsHKvViQnwTNLF30Y/5OqWdnN6RsiyUI8awZXfz1sHcXCEaLe+v+WLQ+/E8sgw0YUkYVHzzfV/sAN2CezJK5Q==",
       "cpu": [
         "x64"
       ],
@@ -2452,15 +2452,14 @@
       "os": [
         "darwin"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-linux-arm-gnueabihf": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.64.tgz",
-      "integrity": "sha512-XE60bZS+qO+d8IQYAayhn3TRqyzVmQeOsX2B1yUHuKZU3Zb/mt/cmD/HLzZZW7J3z19kYf2na7Hvmnt3amUGoA==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm-gnueabihf/-/core-linux-arm-gnueabihf-1.3.66.tgz",
+      "integrity": "sha512-gNbLcSIV2pq90BkMSpzvK4xPXOl8GEF3YR4NaqF0CYSzQsVXXTTqMuX/r26xNYudBKzH0345S1MpoRk2qricnA==",
       "cpu": [
         "arm"
       ],
@@ -2469,15 +2468,14 @@
       "os": [
         "linux"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-linux-arm64-gnu": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.64.tgz",
-      "integrity": "sha512-+jcUua4cYLRMqDicv+4AaTZUGgYWXkXVI9AzaAgfkMNLU2TMXwuYXopxk1giAMop88+ovzYIqrxErRdu70CgtQ==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-gnu/-/core-linux-arm64-gnu-1.3.66.tgz",
+      "integrity": "sha512-cJSQ0oplyWbJqy4rzVcnBYLAi6z1QT3QCcR7iAey0aAmCvfRBZJfXlyjggMjn4iosuadkauwCZR1xYNhBDRn7w==",
       "cpu": [
         "arm64"
       ],
@@ -2486,15 +2484,14 @@
       "os": [
         "linux"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-linux-arm64-musl": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.64.tgz",
-      "integrity": "sha512-50MI8NFYUKhLncqY2piM/XOnNqZT6zY2ZoNOFsy63/T2gAYy1ts4mF4YUEkg4XOA2zhue1JSLZBUrHQXbgMYUQ==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-linux-arm64-musl/-/core-linux-arm64-musl-1.3.66.tgz",
+      "integrity": "sha512-GDQZpcB9aGxG9PTA2shdIkoMZlGK5omJ8NR49uoBTtLBVYiGeXAwV0U1Uaw8kXEZj9i7wZDkvjzjSaNH3evRsg==",
       "cpu": [
         "arm64"
       ],
@@ -2503,15 +2500,14 @@
       "os": [
         "linux"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-linux-x64-gnu": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.64.tgz",
-      "integrity": "sha512-bT8seQ41Q4J2JDgn2JpFCGNehGAIilAkZ476gEaKKroEWepBhkD0K1MspSSVYSJhLSGbBVSaadUEiBPxWgu1Rw==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-gnu/-/core-linux-x64-gnu-1.3.66.tgz",
+      "integrity": "sha512-lg8E4O/Pd9KfK0lajdinVMuGME8dSv7V9arhEpmlfGE2eXSDCWqDn5Htk5QVBstt9lt1lsRhWHJ/YYc2eQY30Q==",
       "cpu": [
         "x64"
       ],
@@ -2520,15 +2516,14 @@
       "os": [
         "linux"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-linux-x64-musl": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.64.tgz",
-      "integrity": "sha512-sJgh3TXCDOEq/Au4XLAgNqy4rVcLeywQBoftnV3rcvX1/u9OCSRzgKLgYc5d1pEN5AMJV1l4u26kbGlQuZ+yRw==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-linux-x64-musl/-/core-linux-x64-musl-1.3.66.tgz",
+      "integrity": "sha512-lo8ZcAO/zL2pZWH+LZIyge8u2MklaeuT6+FpVVpBFktMVdYXbaVtzpvWbgRFBZHvL3SRDF+u8jxjtkXhvGUpTw==",
       "cpu": [
         "x64"
       ],
@@ -2537,15 +2532,14 @@
       "os": [
         "linux"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-win32-arm64-msvc": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.64.tgz",
-      "integrity": "sha512-zWIy+mAWDjtJjl4e4mmhQL7g9KbkOwcWbeoIk4C6NT4VpjnjdX1pMml/Ez2sF5J5cGBwu7B1ePfTe/kAE6G36Q==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-win32-arm64-msvc/-/core-win32-arm64-msvc-1.3.66.tgz",
+      "integrity": "sha512-cQoVwBuJY5WkHbfpCOlndNwYr1ZThatRjQQvKy540NUIeAEk9Fa6ozlDBtU75UdaWKtUG6YQ/bWz+KTemheVxw==",
       "cpu": [
         "arm64"
       ],
@@ -2554,15 +2548,14 @@
       "os": [
         "win32"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-win32-ia32-msvc": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.64.tgz",
-      "integrity": "sha512-6HMiuUeSMpTUAimb1E+gUNjy8m211oAzw+wjU8oOdA6iihWaLBz4TOhU9IaKZPPjqEcYGwqaT3tj5b5+mxde6Q==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-win32-ia32-msvc/-/core-win32-ia32-msvc-1.3.66.tgz",
+      "integrity": "sha512-y/FrAIINK4UBeUQQknGlWXEyjo+MBvjF7WkUf2KP7sNr9EHHy8+dXohAGd5Anz0eJrqOM1ZXR/GEjxRp7bGQ1Q==",
       "cpu": [
         "ia32"
       ],
@@ -2571,15 +2564,14 @@
       "os": [
         "win32"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
     },
     "node_modules/@swc/core-win32-x64-msvc": {
-      "version": "1.3.64",
-      "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.64.tgz",
-      "integrity": "sha512-c8Al0JJfmgnO9sg6w34PICibqI4p7iXywo+wOxjY88oFwMcfV5rGaif1Fe3RqxJP/1WtUV7lYuKKZrneMXtyLA==",
+      "version": "1.3.66",
+      "resolved": "https://registry.npmjs.org/@swc/core-win32-x64-msvc/-/core-win32-x64-msvc-1.3.66.tgz",
+      "integrity": "sha512-yI64ACzS14qFLrfyO12qW+f/UROTotzDeEbuyJAaPD2IZexoT1cICznI3sBmIfrSt33mVuW8eF5m3AG/NUImzw==",
       "cpu": [
         "x64"
       ],
@@ -2588,7 +2580,6 @@
       "os": [
         "win32"
       ],
-      "peer": true,
       "engines": {
         "node": ">=10"
       }
@@ -2598,8 +2589,6 @@
       "resolved": "https://registry.npmjs.org/@swc/helpers/-/helpers-0.5.1.tgz",
       "integrity": "sha512-sJ902EfIzn1Fa+qYmjdQqh8tPsoxyBz+8yBKC2HKUxyezKJFwPGOn7pv4WY6QuQW//ySQi5lJjA/ZT9sNWWNTg==",
       "dev": true,
-      "optional": true,
-      "peer": true,
       "dependencies": {
         "tslib": "^2.4.0"
       }
@@ -5161,6 +5150,12 @@
       "integrity": "sha512-xWGDIW6x921xtzPkhiULtthJHoJvBbF3q26fzloPCK0hsvxtPVelvftw3zjbHWSkR2km9Z+4uxbDDK/6Zw9B8w==",
       "dev": true
     },
+    "node_modules/regenerator-runtime": {
+      "version": "0.13.11",
+      "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
+      "integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==",
+      "dev": true
+    },
     "node_modules/require-directory": {
       "version": "2.1.1",
       "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
diff --git a/cdk-infra/package.json b/cdk-infra/package.json
index 38e1c4043..aeb6b6b2b 100644
--- a/cdk-infra/package.json
+++ b/cdk-infra/package.json
@@ -8,14 +8,21 @@
     "build": "tsc",
     "watch": "tsc -w",
     "test": "jest",
-    "cdk": "cdk"
+    "cdk": "cdk",
+    "deploy:enhanced": "cdk deploy -c enhanced=true -c customFeatureName=enhancedApp -c env=dotcomstg --require-approval never --all",
+    "deploy:enhanced:worker": "cdk deploy auto-builder-stack-enhancedApp-worker -c enhanced=true -c customFeatureName=enhancedApp -c env=dotcomstg --require-approval never --exclusively",
+    "deploy:enhanced:webhooks": "cdk deploy auto-builder-stack-enhancedApp-webhooks -c enhanced=true -c customFeatureName=enhancedApp -c env=dotcomstg --require-approval never --exclusively"
+
   },
   "devDependencies": {
+    "@swc/core": "^1.3.66",
+    "@swc/helpers": "^0.5.1",
     "@types/jest": "^29.4.0",
     "@types/node": "18.14.6",
     "aws-cdk": "2.73.0",
     "esbuild": "^0.18.3",
     "jest": "^29.5.0",
+    "regenerator-runtime": "^0.13.11",
     "ts-jest": "^29.0.5",
     "ts-node": "^10.9.1",
     "typescript": "~4.9.5"
diff --git a/cdk-infra/test/cdk-infra.test.ts b/cdk-infra/test/cdk-infra.test.ts
index d88f55113..930761a4a 100644
--- a/cdk-infra/test/cdk-infra.test.ts
+++ b/cdk-infra/test/cdk-infra.test.ts
@@ -1,6 +1,6 @@
 import * as cdk from 'aws-cdk-lib';
 import { Template } from 'aws-cdk-lib/assertions';
-import { AutoBuilderStack } from '../lib/auto-builder-stack';
+import { AutoBuilderStack } from '../lib/stacks/auto-builder-stack';
 
 describe('autobuilder stack tests', () => {
   it('The stack contains the expected number of resources', () => {
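The existing test still builds the legacy monolithic `AutoBuilderStack`, so the new per-concern stacks are not covered here. A possible follow-up, sketched under the assumption that `AutoBuilderQueueStack` can synthesize without additional context or environment configuration, would assert that the queue stack stands on its own:

```ts
import * as cdk from 'aws-cdk-lib';
import { Template } from 'aws-cdk-lib/assertions';
import { AutoBuilderQueueStack } from '../lib/stacks/auto-builder-queue-stack';

describe('auto-builder queue stack', () => {
  it('synthesizes SQS queues without the worker or webhook stacks', () => {
    const app = new cdk.App();
    const stack = new AutoBuilderQueueStack(app, 'test-queues');
    const template = Template.fromStack(stack);

    // The queues construct should contribute at least the jobs and job-updates queues.
    template.hasResource('AWS::SQS::Queue', {});
  });
});
```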
diff --git a/cdk-infra/tsconfig.json b/cdk-infra/tsconfig.json
index 34f9d8ea0..e08fc8b17 100644
--- a/cdk-infra/tsconfig.json
+++ b/cdk-infra/tsconfig.json
@@ -21,5 +21,8 @@
     "esModuleInterop": true,
     "typeRoots": ["./node_modules/@types"]
   },
+  "ts-node": {
+    "swc": true
+  },
   "exclude": ["node_modules", "cdk.out", "dist"]
 }
diff --git a/package-lock.json b/package-lock.json
index cb42cd1bd..7b385054e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -34392,4 +34392,4 @@
       }
     }
   }
-}
+}
\ No newline at end of file
diff --git a/package.json b/package.json
index 524693ade..42db57368 100644
--- a/package.json
+++ b/package.json
@@ -96,4 +96,4 @@
       "npm run lint:fix"
     ]
   }
-}
+}
\ No newline at end of file
diff --git a/src/enhanced/enhancedApp.ts b/src/enhanced/enhancedApp.ts
index 71baafb79..4772c9a42 100644
--- a/src/enhanced/enhancedApp.ts
+++ b/src/enhanced/enhancedApp.ts
@@ -1,15 +1,67 @@
 import { handleJob } from './utils/job';
 import { listenToJobQueue } from './utils/queue';
+import mongodb, { MongoClient } from 'mongodb';
+import c from 'config';
 
-async function app() {
-  console.log('starting application');
-  const { jobId } = await listenToJobQueue();
+let client: MongoClient | undefined;
 
-  await handleJob(jobId);
+async function connectToDb(): Promise<mongodb.Db> {
+  const atlasURL = `mongodb+srv://${c.get('dbUsername')}:${c.get('dbPassword')}@${c.get(
+    'dbHost'
+  )}/?retryWrites=true&w=majority`;
 
-  console.log('process completed');
+  console.log('[connectToDb]: Instantiating MongoDB client object');
+  client = new MongoClient(atlasURL);
+  console.log('[connectToDb]: Connecting to client');
+  await client.connect();
+  return client.db(c.get('dbName'));
+}
+
+async function cleanupJob(): Promise<void> {
+  try {
+    console.log('[cleanupJob]: Closing MongoDB client connection...');
+    await client?.close();
+
+    console.log('[cleanupJob]: Successfully closed MongoDB client connection!');
+  } catch (e) {
+    console.log('[cleanupJob]: ERROR! Unsuccessfully closed MongoDB client connection', e);
+    process.exitCode = 1;
+  }
+
+  process.exit();
+}
 
-  process.exit(0);
+/**
+ * Added this function as it appears that the finally block would be executed after
+ * the first promise resolves within the app function. Broke this out so that we only call the clean up after we handle the job.
+ * the `finally` block is always called after the try, even if an exception is thrown. If an exception is thrown, the cleanUp job is called,
+ * and the exception is then thrown after.
+ */
+async function handleJobAndCleanUp(jobId: string, db: mongodb.Db) {
+  try {
+    await handleJob(jobId, db);
+  } finally {
+    await cleanupJob();
+  }
+}
+async function app(): Promise<void> {
+  console.log('[app]: starting application');
+
+  try {
+    const { jobId } = await listenToJobQueue();
+    const db = await connectToDb();
+
+    await handleJobAndCleanUp(jobId, db);
+
+    console.log('[app]: process completed');
+  } catch (e) {
+    console.error('[app]: ERROR! Job initialization failed', e);
+    process.exitCode = 1;
+  }
 }
 
 app();
+
+process.on('SIGTERM', async () => {
+  await cleanupJob();
+});
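The comment on `handleJobAndCleanUp` leans on standard `try`/`finally` semantics: the `finally` block runs before a thrown error continues to propagate, so `cleanupJob` always gets to close the MongoDB client, and the original error still surfaces to the caller afterwards. A minimal standalone illustration (not part of this change):

```ts
// Standalone illustration of the ordering handleJobAndCleanUp relies on.
async function failingJob(): Promise<void> {
  throw new Error('job failed');
}

async function run(): Promise<void> {
  try {
    await failingJob();
  } finally {
    // Runs before the error propagates to the caller.
    console.log('cleanup runs first');
  }
}

run().catch((e) => console.error('error surfaces after cleanup:', e.message));
// Output:
//   cleanup runs first
//   error surfaces after cleanup: job failed
```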
diff --git a/src/enhanced/utils/job/handle-job.ts b/src/enhanced/utils/job/handle-job.ts
new file mode 100644
index 000000000..edc461004
--- /dev/null
+++ b/src/enhanced/utils/job/handle-job.ts
@@ -0,0 +1,67 @@
+import { JobManager, JobHandlerFactory } from '../../../job/jobManager';
+import { K8SCDNConnector } from '../../../services/cdn';
+import { ParameterStoreConnector } from '../../../services/ssm';
+import { GitHubConnector } from '../../../services/repo';
+import { HybridJobLogger, ConsoleLogger } from '../../../services/logger';
+import { GithubCommandExecutor, JobSpecificCommandExecutor } from '../../../services/commandExecutor';
+import { JobRepository } from '../../../repositories/jobRepository';
+import { RepoEntitlementsRepository } from '../../../repositories/repoEntitlementsRepository';
+import c from 'config';
+import * as mongodb from 'mongodb';
+import { FileSystemServices } from '../../../services/fileServices';
+import { JobValidator } from '../../../job/jobValidator';
+import { RepoBranchesRepository } from '../../../repositories/repoBranchesRepository';
+import { ISSOConnector, OktaConnector } from '../../../services/sso';
+import { EnhancedJobHandlerFactory } from '../../job/enhancedJobHandlerFactory';
+
+let consoleLogger: ConsoleLogger;
+let fileSystemServices: FileSystemServices;
+let jobCommandExecutor: JobSpecificCommandExecutor;
+let githubCommandExecutor: GithubCommandExecutor;
+let jobRepository: JobRepository;
+let hybridJobLogger: HybridJobLogger;
+let repoEntitlementRepository: RepoEntitlementsRepository;
+let jobValidator: JobValidator;
+let cdnConnector: K8SCDNConnector;
+let repoConnector: GitHubConnector;
+let jobHandlerFactory: JobHandlerFactory;
+let jobManager: JobManager;
+let repoBranchesRepo: RepoBranchesRepository;
+let ssmConnector: ParameterStoreConnector;
+let ssoConnector: ISSOConnector;
+
+export async function handleJob(jobId: string, db: mongodb.Db) {
+  consoleLogger = new ConsoleLogger();
+  fileSystemServices = new FileSystemServices();
+  jobCommandExecutor = new JobSpecificCommandExecutor();
+  githubCommandExecutor = new GithubCommandExecutor();
+  jobRepository = new JobRepository(db, c, consoleLogger);
+  hybridJobLogger = new HybridJobLogger(jobRepository);
+  ssmConnector = new ParameterStoreConnector();
+  repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger);
+  repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger);
+  jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo);
+  ssoConnector = new OktaConnector(c, consoleLogger);
+  cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector);
+  repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger);
+  jobHandlerFactory = new EnhancedJobHandlerFactory();
+
+  jobManager = new JobManager(
+    c,
+    jobValidator,
+    jobHandlerFactory,
+    jobCommandExecutor,
+    jobRepository,
+    cdnConnector,
+    repoConnector,
+    fileSystemServices,
+    hybridJobLogger,
+    repoBranchesRepo
+  );
+
+  try {
+    await jobManager.startSpecificJob(jobId);
+  } catch (err) {
+    consoleLogger.info('enhancedApp', err);
+  }
+}
diff --git a/src/enhanced/utils/job/index.ts b/src/enhanced/utils/job/index.ts
index 450ea1cbf..c19e5fc59 100644
--- a/src/enhanced/utils/job/index.ts
+++ b/src/enhanced/utils/job/index.ts
@@ -1,80 +1,4 @@
-import { JobManager, JobHandlerFactory } from '../../../job/jobManager';
-import { K8SCDNConnector } from '../../../services/cdn';
-import { ParameterStoreConnector } from '../../../services/ssm';
-import { GitHubConnector } from '../../../services/repo';
-import { HybridJobLogger, ConsoleLogger } from '../../../services/logger';
-import { GithubCommandExecutor, JobSpecificCommandExecutor } from '../../../services/commandExecutor';
-import { JobRepository } from '../../../repositories/jobRepository';
-import { RepoEntitlementsRepository } from '../../../repositories/repoEntitlementsRepository';
-import c from 'config';
-import * as mongodb from 'mongodb';
-import { FileSystemServices } from '../../../services/fileServices';
-import { JobValidator } from '../../../job/jobValidator';
-import { RepoBranchesRepository } from '../../../repositories/repoBranchesRepository';
-import { ISSOConnector, OktaConnector } from '../../../services/sso';
-import { EnhancedJobHandlerFactory } from '../../job/enhancedJobHandlerFactory';
+import { handleJob } from './handle-job';
+import { protectTask } from './task-management';
 
-let db: mongodb.Db;
-let consoleLogger: ConsoleLogger;
-let fileSystemServices: FileSystemServices;
-let jobCommandExecutor: JobSpecificCommandExecutor;
-let githubCommandExecutor: GithubCommandExecutor;
-let jobRepository: JobRepository;
-let hybridJobLogger: HybridJobLogger;
-let repoEntitlementRepository: RepoEntitlementsRepository;
-let jobValidator: JobValidator;
-let cdnConnector: K8SCDNConnector;
-let repoConnector: GitHubConnector;
-let jobHandlerFactory: JobHandlerFactory;
-let jobManager: JobManager;
-let repoBranchesRepo: RepoBranchesRepository;
-let ssmConnector: ParameterStoreConnector;
-let ssoConnector: ISSOConnector;
-let client: mongodb.MongoClient;
-
-export async function handleJob(jobId: string) {
-  const atlasURL = `mongodb+srv://${c.get('dbUsername')}:${c.get('dbPassword')}@${c.get(
-    'dbHost'
-  )}/?retryWrites=true&w=majority`;
-
-  client = new mongodb.MongoClient(atlasURL);
-  await client.connect();
-  db = client.db(c.get('dbName'));
-  consoleLogger = new ConsoleLogger();
-  fileSystemServices = new FileSystemServices();
-  jobCommandExecutor = new JobSpecificCommandExecutor();
-  githubCommandExecutor = new GithubCommandExecutor();
-  jobRepository = new JobRepository(db, c, consoleLogger);
-  hybridJobLogger = new HybridJobLogger(jobRepository);
-  ssmConnector = new ParameterStoreConnector();
-  repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger);
-  repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger);
-  jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo);
-  ssoConnector = new OktaConnector(c, consoleLogger);
-  cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector);
-  repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger);
-  jobHandlerFactory = new EnhancedJobHandlerFactory();
-
-  jobManager = new JobManager(
-    c,
-    jobValidator,
-    jobHandlerFactory,
-    jobCommandExecutor,
-    jobRepository,
-    cdnConnector,
-    repoConnector,
-    fileSystemServices,
-    hybridJobLogger,
-    repoBranchesRepo
-  );
-
-  try {
-    await jobManager.startSpecificJob(jobId);
-  } catch (err) {
-    consoleLogger.info('enhancedApp', err);
-  }
-}
-// clean up
-process.on('SIGTERM', () => {
-  client.close();
-});
+export { handleJob, protectTask };
diff --git a/src/enhanced/utils/job/task-management.ts b/src/enhanced/utils/job/task-management.ts
new file mode 100644
index 000000000..cb669ff61
--- /dev/null
+++ b/src/enhanced/utils/job/task-management.ts
@@ -0,0 +1,36 @@
+import axios from 'axios';
+
+interface ProtectionPutResponse {
+  failure?: {
+    Arn: string;
+    Detail: string | null;
+    Reason: string;
+  };
+}
+
+/**
+ * Protecting the task will prevent the task from being deleted when we do a deploy/update.
+ * This means that the job will not be lost, and will continue to process like normal.
+ */
+export async function protectTask() {
+  const { ECS_AGENT_URI } = process.env;
+
+  if (!ECS_AGENT_URI) throw new Error('ERROR! No agent URI defined');
+
+  try {
+    const { failure } = (
+      await axios.put<ProtectionPutResponse>(`${ECS_AGENT_URI}/task-protection/v1/state`, {
+        ProtectionEnabled: true,
+      })
+    ).data;
+
+    if (failure) {
+      const { Reason, Detail } = failure;
+      throw new Error(`ERROR! Could not protect task. Reason: ${Reason} \n Details: ${Detail}`);
+    }
+  } catch (e) {
+    console.error('ERROR! Could not protect task', e);
+    process.exitCode = 1;
+    throw e;
+  }
+}
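`protectTask` only ever turns protection on. The same `task-protection/v1/state` agent endpoint also accepts `ProtectionEnabled: false` and an `ExpiresInMinutes` field, so a conceivable follow-up — sketched here as a hypothetical helper, not something this PR adds — would release protection once the job finishes so deployments and scale-in are not blocked longer than necessary:

```ts
import axios from 'axios';

// Hypothetical helper: drop the protection that protectTask acquired once the job is done.
// ExpiresInMinutes could alternatively be sent when acquiring protection to time-box it.
export async function releaseTaskProtection(): Promise<void> {
  const { ECS_AGENT_URI } = process.env;

  // Nothing to release when running outside ECS (e.g. locally).
  if (!ECS_AGENT_URI) return;

  await axios.put(`${ECS_AGENT_URI}/task-protection/v1/state`, {
    ProtectionEnabled: false,
  });
}
```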
diff --git a/src/enhanced/utils/queue/index.ts b/src/enhanced/utils/queue/index.ts
index 7d7ee6630..289ea5306 100644
--- a/src/enhanced/utils/queue/index.ts
+++ b/src/enhanced/utils/queue/index.ts
@@ -2,6 +2,7 @@ import { ReceiveMessageCommandInput, SQS } from '@aws-sdk/client-sqs';
 import config from 'config';
 import { JobsQueuePayload } from '../../types/job-types';
 import { isJobQueuePayload } from '../../types/utils/type-guards';
+import { protectTask } from '../job';
 
 /**
  * This function listens to the job queue until a message is received.
@@ -13,7 +14,7 @@ export async function listenToJobQueue(): Promise<JobsQueuePayload> {
 
   const client = new SQS({ region });
 
-  console.log('Polling jobsQueue');
+  console.log('[listenToJobQueue]: Polling jobsQueue');
 
   // We want to loop indefinitely so that we continue to poll the queue.
   while (true) {
@@ -27,18 +28,29 @@ export async function listenToJobQueue(): Promise<JobsQueuePayload> {
 
     if (!res.Messages || res.Messages.length === 0) continue;
 
-    console.log('received valid message');
-
     const message = res.Messages[0];
 
-    // We have the message body, now we can delete it from the queue.
+    // Before we delete the message from the queue, we want to protect the task.
+    // This is because if we protect the task after we delete, we could end up with a condition
+    // where the task is unprotected, and it deletes a message. This means that if we happen
+    // to do a deploy in this state, we will delete the message from the queue AND end the task,
+    // preventing the job from completing while also losing the request in the process.
+    // This means that the job request will never be processed.
+    // NOTE: Intentionally not catching here, as this throw should be handled by the method listening to the queue.
+    // We don't want to continue listening to the queue, as there is something wrong with the protect task mechanism.
+    // We can let the task end, as it is unsafe to let an unprotected task process a job.
+    await protectTask();
+
+    console.log('[listenToJobQueue]: Deleting message...');
+
+    // We have validated the message, now we can delete it.
     try {
       await client.deleteMessage({ QueueUrl: queueUrl, ReceiptHandle: message.ReceiptHandle });
     } catch (e) {
       // We want to keep the task alive because we do not want to process multiple jobs.
       // This could lead to multiple tasks completing jobs, without new tasks being spun up.
       console.error(
-        `ERROR! Could not delete message. Preventing job from being processed, as this could lead to multiple jobs being processed. Error Obj: ${JSON.stringify(
+        `[listenToJobQueue]: ERROR! Could not delete message. Preventing job from being processed, as this could lead to multiple jobs being processed. Error Obj: ${JSON.stringify(
           e,
           null,
           4
@@ -47,8 +59,12 @@ export async function listenToJobQueue(): Promise<JobsQueuePayload> {
       continue;
     }
 
+    console.log('[listenToJobQueue]: Message successfully deleted from queue!');
+
     if (!message.Body) {
-      console.error(`ERROR! Received message from queue without body. Message ID is: ${message.MessageId}`);
+      console.error(
+        `[listenToJobQueue]: ERROR! Received message from queue without body. Message ID is: ${message.MessageId}`
+      );
       continue;
     }
 
@@ -58,13 +74,15 @@ export async function listenToJobQueue(): Promise<JobsQueuePayload> {
     // This ensures that the `payload` object will be of type `JobQueuePayload` after the if statement.
     if (!isJobQueuePayload(payload)) {
       console.error(
-        `ERROR! Invalid payload data received from message ID: ${message.MessageId}. Payload received: ${JSON.stringify(
-          payload
-        )}`
+        `[listenToJobQueue]: ERROR! Invalid payload data received from message ID: ${
+          message.MessageId
+        }. Payload received: ${JSON.stringify(payload)}`
       );
       continue;
     }
 
+    console.log('[listenToJobQueue]: received valid message');
+
     // Great! we received a proper message from the queue. Return this object as we will no longer
     // want to poll for more messages.
     return payload;
diff --git a/tsconfig.json b/tsconfig.json
index 5da563ed3..5db61e33f 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -11,5 +11,5 @@
     "experimentalDecorators": true
   },
   "include": ["src/**/*.ts"],
-  "exclude": ["modules", "node_modules", "tests/**/*.ts", "api/**/*.ts"]
+  "exclude": ["modules", "node_modules", "tests/**/*.ts", "api/**/*.ts", "cdk-infra"]
 }
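For context on the queue listener changes in `src/enhanced/utils/queue/index.ts` above: the `isJobQueuePayload` guard it relies on lives in `src/enhanced/types/utils/type-guards` and is not part of this diff. Only `jobId` is visible from the surrounding code, so the sketch below is a hypothetical shape of that guard, not the repository's actual implementation:

```ts
// Hypothetical sketch of the type guard the queue listener relies on.
// Only jobId is known from this diff; any other fields would be assumptions.
interface JobsQueuePayload {
  jobId: string;
}

function isJobQueuePayload(payload: unknown): payload is JobsQueuePayload {
  if (typeof payload !== 'object' || payload === null) return false;
  return typeof (payload as { jobId?: unknown }).jobId === 'string';
}
```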