diff --git a/e2e/k8s/kindConfig.yaml b/e2e/k8s/kindConfig.yaml deleted file mode 100644 index 54fd983a1f4..00000000000 --- a/e2e/k8s/kindConfig.yaml +++ /dev/null @@ -1,12 +0,0 @@ -kind: Cluster -name: k8se2e -apiVersion: kind.x-k8s.io/v1alpha4 -nodes: -- role: control-plane - extraPortMappings: - - containerPort: 30200 - hostPort: 49200 - - containerPort: 30678 - hostPort: 45678 - - containerPort: 30092 - hostPort: 49092 diff --git a/e2e/k8s/kindConfigDefaultPorts.yaml b/e2e/k8s/kindConfigDefaultPorts.yaml new file mode 100644 index 00000000000..9980071b769 --- /dev/null +++ b/e2e/k8s/kindConfigDefaultPorts.yaml @@ -0,0 +1,12 @@ +kind: Cluster +name: k8se2e +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: +- role: control-plane + extraPortMappings: + - containerPort: 30200 # Map internal elasticsearch service to host port + hostPort: 9200 + - containerPort: 30678 # Map internal teraslice service to host port + hostPort: 5678 + - containerPort: 30092 # Map internal kafka service to host port + hostPort: 9092 diff --git a/e2e/k8s/kindConfigTestPorts.yaml b/e2e/k8s/kindConfigTestPorts.yaml new file mode 100644 index 00000000000..d5b8b2e8280 --- /dev/null +++ b/e2e/k8s/kindConfigTestPorts.yaml @@ -0,0 +1,12 @@ +kind: Cluster +name: k8se2e +apiVersion: kind.x-k8s.io/v1alpha4 +nodes: +- role: control-plane + extraPortMappings: + - containerPort: 30200 # Map internal elasticsearch service to host port + hostPort: 49200 + - containerPort: 30678 # Map internal teraslice service to host port + hostPort: 45678 + - containerPort: 30092 # Map internal kafka service to host port + hostPort: 49092 diff --git a/e2e/test/global.setup.js b/e2e/test/global.setup.js index 0d57b82b4d6..936618e5b51 100644 --- a/e2e/test/global.setup.js +++ b/e2e/test/global.setup.js @@ -13,7 +13,7 @@ const signale = require('./signale'); const setupTerasliceConfig = require('./setup-config'); const downloadAssets = require('./download-assets'); const { - CONFIG_PATH, ASSETS_PATH, TEST_PLATFORM, HOST_IP + 
CONFIG_PATH, ASSETS_PATH, TEST_PLATFORM } = require('./config'); module.exports = async () => { @@ -41,7 +41,7 @@ module.exports = async () => { if (TEST_PLATFORM === 'kubernetes') { await deployK8sTeraslice(); await teraslice.waitForTeraslice(); - await setAliasAndBaseAssets(HOST_IP); + await setAliasAndBaseAssets(); } else { await Promise.all([setupTerasliceConfig(), downloadAssets()]); await dockerUp(); diff --git a/e2e/test/teraslice-harness.js b/e2e/test/teraslice-harness.js index 3fdbb5feec3..a54a5baac8b 100644 --- a/e2e/test/teraslice-harness.js +++ b/e2e/test/teraslice-harness.js @@ -90,7 +90,7 @@ module.exports = class TerasliceHarness { if (TEST_PLATFORM === 'kubernetes') { try { cleanupIndex(this.client, `${SPEC_INDEX_PREFIX}*`); - await showState(HOST_IP); + await showState(); } catch (err) { signale.error('Failure to clean indices and assets', err); throw err; diff --git a/package.json b/package.json index c80facbbf97..3d4e5117edb 100644 --- a/package.json +++ b/package.json @@ -22,6 +22,9 @@ "build:watch": "yarn run build --watch", "bump": "ts-scripts bump", "docs": "ts-scripts docs", + "k8s": "TEST_ELASTICSEARCH='true' ELASTICSEARCH_PORT='9200' ts-scripts k8s-env", + "k8s:kafka": "TEST_ELASTICSEARCH='true' ELASTICSEARCH_PORT='9200' TEST_KAFKA='true' KAFKA_PORT='9092' ts-scripts k8s-env", + "k8s:noBuild": "TEST_ELASTICSEARCH='true' ELASTICSEARCH_PORT='9200' SKIP_DOCKER_BUILD_IN_K8S='true' ts-scripts k8s-env", "lint": "eslint --cache --ext .js,.jsx,.ts,.tsx .", "lint:fix": "yarn lint --fix && yarn sync", "setup": "yarn $YARN_SETUP_ARGS && yarn run build --force", @@ -77,7 +80,8 @@ "unit": [], "_for_testing_": [ "elasticsearch" - ] + ], + "k8s_env": [] } }, "docker": { diff --git a/packages/scripts/package.json b/packages/scripts/package.json index 8d9ee38e49e..9bbcf789ef9 100644 --- a/packages/scripts/package.json +++ b/packages/scripts/package.json @@ -1,7 +1,7 @@ { "name": "@terascope/scripts", "displayName": "Scripts", - "version": "0.60.1", + 
"version": "0.60.2", "description": "A collection of terascope monorepo scripts", "homepage": "https://github.com/terascope/teraslice/tree/master/packages/scripts#readme", "bugs": { diff --git a/packages/scripts/src/cmds/k8s-env.ts b/packages/scripts/src/cmds/k8s-env.ts new file mode 100644 index 00000000000..6ed8cdd7137 --- /dev/null +++ b/packages/scripts/src/cmds/k8s-env.ts @@ -0,0 +1,67 @@ +import { CommandModule } from 'yargs'; +import * as config from '../helpers/config'; +import { launchK8sEnv } from '../helpers/k8s-env'; +import { kafkaVersionMapper } from '../helpers/mapper'; + +const cmd: CommandModule = { + command: 'k8s-env', + describe: 'Run a local kubernetes dev environment using kind.', + builder(yargs) { + return yargs + .example('TEST_ELASTICSEARCH=\'true\' ELASTICSEARCH_PORT=\'9200\' $0 k8s-env', 'Start a kind kubernetes cluster running teraslice and elasticsearch.') + .example('TEST_ELASTICSEARCH=\'true\' ELASTICSEARCH_PORT=\'9200\' TEST_KAFKA=\'true\' KAFKA_PORT=\'9092\' $0 k8s-env', 'Start a kind kubernetes cluster running teraslice, elasticsearch, kafka, and zookeeper.') + .example('TEST_ELASTICSEARCH=\'true\' ELASTICSEARCH_PORT=\'9200\' SKIP_DOCKER_BUILD_IN_K8S=\'true\' $0 k8s-env', 'Start a kind kubernetes cluster, but skip building a new teraslice docker image.') + .option('elasticsearch-version', { + description: 'The elasticsearch version to use', + type: 'string', + default: config.ELASTICSEARCH_VERSION, + }) + .option('kafka-version', { + description: 'The kafka version to use', + type: 'string', + default: config.KAFKA_VERSION, + }) + .option('minio-version', { + description: 'The minio version to use', + type: 'string', + default: config.MINIO_VERSION, + }) + .option('rabbitmq-version', { + description: 'The rabbitmq version to use', + type: 'string', + default: config.RABBITMQ_VERSION, + }) + .option('opensearch-version', { + description: 'The opensearch version to use', + type: 'string', + default: config.OPENSEARCH_VERSION, + }) + 
.option('node-version', { + description: 'Node version, there must be a Docker base image with this version (e.g. 18.16.0)', + type: 'string', + default: config.NODE_VERSION + }) + .option('skip-build', { + description: 'Skip building the teraslice docker image', + type: 'boolean', + default: config.SKIP_DOCKER_BUILD_IN_K8S + }); + }, + handler(argv) { + const kafkaCPVersion = kafkaVersionMapper(argv.kafkaVersion as string); + + return launchK8sEnv({ + elasticsearchVersion: argv.elasticsearchVersion as string, + kafkaVersion: argv.kafkaVersion as string, + kafkaImageVersion: kafkaCPVersion, + zookeeperVersion: kafkaCPVersion, + minioVersion: argv.minioVersion as string, + rabbitmqVersion: argv.rabbitmqVersion as string, + opensearchVersion: argv.opensearchVersion as string, + nodeVersion: argv['node-version'] as string, + skipBuild: Boolean(argv['skip-build']) + }); + }, +}; + +export = cmd; diff --git a/packages/scripts/src/helpers/config.ts b/packages/scripts/src/helpers/config.ts index abb2a213c4e..c257a59e832 100644 --- a/packages/scripts/src/helpers/config.ts +++ b/packages/scripts/src/helpers/config.ts @@ -3,28 +3,12 @@ import { toBoolean, toSafeString, isCI, toIntegerOrThrow } from '@terascope/utils'; import { Service } from './interfaces'; +import { kafkaVersionMapper } from './mapper'; const forceColor = process.env.FORCE_COLOR || '1'; export const FORCE_COLOR = toBoolean(forceColor) ? '1' : '0'; -const kafkaMapper = { - 3: { - 0: '7.0.11', - 1: '7.1.9', - 2: '7.2.7', - 3: '7.3.5', - 4: '7.4.2', - 5: '7.5.1' - }, - 2: { - 4: '5.4.10', - 5: '5.5.12', - 6: '6.0.15', - 7: '6.1.13', - 8: '6.2.12' - } -}; /** The timeout for how long a service has to stand up */ export const SERVICE_UP_TIMEOUT = process.env.SERVICE_UP_TIMEOUT ??
'2m'; @@ -51,11 +35,11 @@ export const KAFKA_HOSTNAME = process.env.KAFKA_HOSTNAME || HOST_IP; export const KAFKA_PORT = process.env.KAFKA_PORT || '49092'; export const KAFKA_BROKER = `${KAFKA_HOSTNAME}:${KAFKA_PORT}`; export const KAFKA_VERSION = process.env.KAFKA_VERSION || '3.1'; -// Use kafkaMapper to determine confluentinc/cp-kafka image version from KAFKA_VERSION -export const KAFKA_IMAGE_VERSION = kafkaMapper[KAFKA_VERSION.charAt(0)][KAFKA_VERSION.charAt(2)]; +// Use kafkaVersionMapper to determine confluentinc/cp-kafka image version from KAFKA_VERSION +export const KAFKA_IMAGE_VERSION = kafkaVersionMapper(KAFKA_VERSION); export const KAFKA_DOCKER_IMAGE = process.env.KAFKA_DOCKER_IMAGE || 'confluentinc/cp-kafka'; -// Zookeeper version needs to match KAFKA_IMAGE_VERSION which is determined by kafkaMapper -export const ZOOKEEPER_VERSION = kafkaMapper[KAFKA_VERSION.charAt(0)][KAFKA_VERSION.charAt(2)]; +// Zookeeper version needs to match KAFKA_IMAGE_VERSION which is determined by kafkaVersionMapper +export const ZOOKEEPER_VERSION = KAFKA_IMAGE_VERSION; export const ZOOKEEPER_CLIENT_PORT = process.env.ZOOKEEPER_CLIENT_PORT || '42181'; export const ZOOKEEPER_TICK_TIME = process.env.ZOOKEEPER_TICK_TIME || '2000'; export const ZOOKEEPER_DOCKER_IMAGE = process.env.ZOOKEEPER_DOCKER_IMAGE || 'confluentinc/cp-zookeeper'; @@ -112,7 +96,7 @@ export const DEV_TAG = toSafeString(( || process.env.TRAVIS_BRANCH || process.env.CI_COMMIT_REF_SLUG || 'local' -// convert dependabot/npm_and_yarn/dep-x.x.x to dependabot + // convert dependabot/npm_and_yarn/dep-x.x.x to dependabot ).split('/', 1)[0]); /** @@ -128,6 +112,8 @@ export const DEV_DOCKER_IMAGE = process.env.DEV_DOCKER_IMAGE || undefined; */ export const SKIP_DOCKER_BUILD_IN_E2E = toBoolean(process.env.SKIP_DOCKER_BUILD_IN_E2E ?? false); +export const SKIP_DOCKER_BUILD_IN_K8S = toBoolean(process.env.SKIP_DOCKER_BUILD_IN_K8S ?? false); + export const SKIP_E2E_OUTPUT_LOGS = toBoolean(process.env.SKIP_E2E_OUTPUT_LOGS ?? 
!isCI); /** @@ -163,7 +149,7 @@ export const ENV_SERVICES = [ testOpensearch ? Service.Opensearch : undefined, testElasticsearch ? Service.Elasticsearch : undefined, toBoolean(TEST_KAFKA) ? Service.Kafka : undefined, - /// couple kafa with zookeeper + /// couple kafka with zookeeper toBoolean(TEST_KAFKA) ? Service.Zookeeper : undefined, toBoolean(TEST_MINIO) ? Service.Minio : undefined, testRestrainedOpensearch ? Service.RestrainedOpensearch : undefined, diff --git a/packages/scripts/src/helpers/k8s-env/index.ts b/packages/scripts/src/helpers/k8s-env/index.ts new file mode 100644 index 00000000000..36bea30b81a --- /dev/null +++ b/packages/scripts/src/helpers/k8s-env/index.ts @@ -0,0 +1,86 @@ +// import { debugLogger } from '@terascope/utils'; +import { + createKindCluster, + createNamespace, + deployK8sTeraslice, + dockerTag, + isKindInstalled, + isKubectlInstalled, + loadTerasliceImage +} from '../scripts'; +import { k8sEnvOptions } from './interfaces'; +import signale from '../signale'; +import { getDevDockerImage, getRootInfo } from '../misc'; +import { buildDevDockerImage } from '../publish/utils'; +import { PublishOptions, PublishType } from '../publish/interfaces'; +import * as config from '../config'; +import { ensureServices } from '../test-runner/services'; + +// const logger = debugLogger('ts-scripts:cmd:k8s-env'); + +export async function launchK8sEnv(options: k8sEnvOptions) { + signale.pending('Starting k8s environment with the following options: ', options); + + const kindInstalled = await isKindInstalled(); + if (!kindInstalled) { + signale.error('Please install Kind before launching a k8s dev environment. https://kind.sigs.k8s.io/docs/user/quick-start'); + process.exit(1); + } + + const kubectlInstalled = await isKubectlInstalled(); + if (!kubectlInstalled) { + signale.error('Please install kubectl before launching a k8s dev environment. 
https://kubernetes.io/docs/tasks/tools/'); + process.exit(1); + } + + signale.pending('Creating kind cluster'); + await createKindCluster('k8s-env'); + signale.success('Kind cluster created'); + await createNamespace('services-ns.yaml'); + + const rootInfo = getRootInfo(); + const e2eImage = `${rootInfo.name}:e2e`; + + let devImage; + if (options.skipBuild) { + devImage = `${getDevDockerImage()}-nodev${options.nodeVersion}`; + } else { + try { + const publishOptions: PublishOptions = { + dryRun: true, + nodeVersion: options.nodeVersion, + type: PublishType.Dev + }; + devImage = await buildDevDockerImage(publishOptions); + } catch (err) { + signale.error('Docker image build failed: ', err); + process.exit(1); + } + } + + try { + await dockerTag(devImage, e2eImage); + } catch (err) { + signale.error(`Failed to tag docker image ${devImage} as ${e2eImage}.`, err); + } + + await loadTerasliceImage(e2eImage); + + await ensureServices('k8s_env', { + ...options, + debug: false, + trace: false, + bail: false, + watch: false, + all: false, + keepOpen: false, + reportCoverage: false, + useExistingServices: false, + elasticsearchAPIVersion: config.ELASTICSEARCH_API_VERSION, + ignoreMount: false, + testPlatform: 'kubernetes' + }); + + await deployK8sTeraslice(true); + signale.success('k8s environment ready.\nNext steps:\n\tAdd alias: teraslice-cli aliases add http://localhost:5678\n\t\tExample: teraslice-cli aliases add cluster1 http://localhost:5678\n\tLoad assets: teraslice-cli assets deploy \n\t\tExample: teraslice-cli assets deploy cluster1 terascope/elasticsearch-assets\n\tRegister a job: teraslice-cli tjm register \n\t\tExample: teraslice-cli tjm reg cluster1 JOB.JSON\n\tStart a job: teraslice-cli tjm start \n\t\tExample: teraslice-cli tjm start JOB.JSON\n\tSee the docs for more options: https://terascope.github.io/teraslice/docs/packages/teraslice-cli/overview'); +} diff --git a/packages/scripts/src/helpers/k8s-env/interfaces.ts 
b/packages/scripts/src/helpers/k8s-env/interfaces.ts new file mode 100644 index 00000000000..fe56e2e5056 --- /dev/null +++ b/packages/scripts/src/helpers/k8s-env/interfaces.ts @@ -0,0 +1,90 @@ +export interface k8sEnvOptions { + elasticsearchVersion: string; + kafkaVersion: string; + kafkaImageVersion: string, + zookeeperVersion: string, + minioVersion: string; + rabbitmqVersion: string; + opensearchVersion: string; + nodeVersion: string; + skipBuild: boolean; +} + +// TODO: create a common parent for each resource type, +// or use types from k8s-client when implemented. +export interface yamlDeploymentResource { + apiVersion: string; + kind: string; + metadata: { + name: string; + labels: { + 'app.kubernetes.io/name': string; + 'app.kubernetes.io/component': string; + } + }; + spec: { + replicas: number; + selector: { + matchLabels: { + 'app.kubernetes.io/name': string; + 'app.kubernetes.io/component': string; + }; + }; + template: { + metadata: { + labels: { + 'app.kubernetes.io/name': string; + 'app.kubernetes.io/component': string; + }; + }; + spec: { + containers: [ + { + name: string; + image: string; + ports: [ + { + containerPort: string; + } + ]; + env: [ + { + name: string; + value:string; + }, + { + name: string; + value: string; + } + ]; + } + ]; + }; + }; + }; +} + +export interface yamlServiceResource { + kind: string; + apiVersion: string; + metadata: { + name: string; + labels: { + 'app.kubernetes.io/name': string; + } + } + spec: { + selector: { + 'app.kubernetes.io/name': string; + 'app.kubernetes.io/component': string; + }, + ports: [ + { + port: number; + targetPort: number; + nodePort: number; + } + ], + type: string; + } +} diff --git a/packages/scripts/src/helpers/mapper.ts b/packages/scripts/src/helpers/mapper.ts new file mode 100644 index 00000000000..e5049dff8f3 --- /dev/null +++ b/packages/scripts/src/helpers/mapper.ts @@ -0,0 +1,46 @@ +/** + * This function maps a kafka version to the equivalent version of confluent/cp-kafka, + * which 
is the kafka docker image currently used in teraslice. + The kafka version should include the major and minor versions, but not the patch version. + Ref: https://docs.confluent.io/platform/current/installation/versions-interoperability.html#cp-and-apache-ak-compatibility +*/ +export function kafkaVersionMapper(kafkaVersion: string): string { + const regex = /(0|[1-9]\d*)\.(0|[1-9]\d*)/; + if (!regex.test(kafkaVersion)) { + throw new Error('Kafka version must contain major and minor semver version, but omit patch version.'); + } + const kafkaMapper = { + 3: { + 0: '7.0.11', + 1: '7.1.9', + 2: '7.2.7', + 3: '7.3.5', + 4: '7.4.2', + 5: '7.5.1' + }, + 2: { + 4: '5.4.10', + 5: '5.5.12', + 6: '6.0.15', + 7: '6.1.13', + 8: '6.2.12' + } + }; + + const cpKafkaVersion = kafkaMapper[kafkaVersion.charAt(0)][kafkaVersion.charAt(2)]; + if (!cpKafkaVersion) { + throw new Error(`Kafka version ${kafkaVersion} could not be mapped to cp-kafka version. Supported versions are ${errMsgVersionStringBuilder(kafkaMapper)}.`); + } + return cpKafkaVersion; +} + +function errMsgVersionStringBuilder(map: object) { + const msgsArr: string[] = []; + for (const major in map) { + if (Object.prototype.hasOwnProperty.call(map, major)) { + const minors = Object.keys(map[major]); + msgsArr.push(`${major}.${minors[0]}-${major}.${minors[minors.length - 1]}`); + } + } + return msgsArr.join(', '); +} diff --git a/packages/scripts/src/helpers/scripts.ts b/packages/scripts/src/helpers/scripts.ts index 5b2758e52cb..ce958fea3ff 100644 --- a/packages/scripts/src/helpers/scripts.ts +++ b/packages/scripts/src/helpers/scripts.ts @@ -1,7 +1,10 @@ +import fs from 'fs'; +import os from 'os'; import ms from 'ms'; import path from 'path'; import execa from 'execa'; import fse from 'fs-extra'; +import yaml from 'js-yaml'; import { debugLogger, isString, @@ -15,8 +18,10 @@ import { getRootDir } from './misc'; import signale from './signale'; import * as config from './config'; import { getE2eK8sDir } from
'../helpers/packages'; +import { yamlDeploymentResource, yamlServiceResource } from './k8s-env/interfaces'; const logger = debugLogger('ts-scripts:cmd'); +let TS_PORT = '45678'; export type ExecEnv = { [name: string]: string }; type ExecOpts = { @@ -534,13 +539,19 @@ export async function yarnPublish( }); } -export async function createKindCluster(): Promise { +export async function createKindCluster(cmd = 'test'): Promise { const e2eK8sDir = getE2eK8sDir(); if (!e2eK8sDir) { throw new Error('Missing k8s e2e test directory'); } - const configPath = path.join(e2eK8sDir, 'kindConfig.yaml'); + let configPath: string; + if (cmd === 'k8s-env') { + TS_PORT = '5678'; + configPath = path.join(e2eK8sDir, 'kindConfigDefaultPorts.yaml'); + } else { // cmd === test + configPath = path.join(e2eK8sDir, 'kindConfigTestPorts.yaml'); + } const subprocess = await execa.command(`kind create cluster --config ${configPath}`); logger.debug(subprocess.stderr); } @@ -603,7 +614,20 @@ export async function kindLoadServiceImage( } } -export async function kindStartService(serviceName: string): Promise { +export async function kindStartService( + serviceName: string, image: string, version: string +): Promise { + const availableServices = [ + 'elasticsearch', 'kafka', 'zookeeper', // 'opensearch', 'minio', 'rabbitmq' + ]; + + if (!availableServices.includes(serviceName)) { + signale.error(`Service ${serviceName} is not available. 
No kubernetes deployment yaml file in 'e2e/k8s' directory.`); + signale.info(`Remove ${serviceName} from the services list by running 'unset TEST_${serviceName.toUpperCase()}' in your terminal.`); + await destroyKindCluster(); + process.exit(1); + } + // Any new service's yaml file must be named 'Deployment.yaml' const yamlFile = `${serviceName}Deployment.yaml`; @@ -612,22 +636,31 @@ export async function kindStartService(serviceName: string): Promise { throw new Error('Missing k8s e2e test directory'); } + const imageString = `${image}:${version}`; + try { - const subprocess = await execa.command(`kubectl create -n services-dev1 -f ${path.join(e2eK8sDir, yamlFile)}`); + const jsDoc = yaml.loadAll(fs.readFileSync(`${path.join(e2eK8sDir, yamlFile)}`, 'utf8')) as Array; + const deployment = jsDoc[0] as yamlDeploymentResource; + deployment.spec.template.spec.containers[0].image = imageString; + const updatedYaml = jsDoc.map((doc) => yaml.dump(doc)).join('---\n'); + const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), 'tempYaml')); + fs.writeFileSync(path.join(tempDir, `${serviceName}Deployment.yaml`), updatedYaml); + const subprocess = await execa.command(`kubectl create -n services-dev1 -f ${path.join(tempDir, `${serviceName}Deployment.yaml`)}`); logger.debug(subprocess.stdout); + fs.rmSync(tempDir, { recursive: true, force: true }); } catch (err) { logger.error(`The service ${serviceName} could not be started: `, err); } if (serviceName === 'kafka') { - await waitForKafkaRunning(240000); + await waitForKafkaRunning(); } } -function waitForKafkaRunning(timeoutMs = 120000): Promise { +function waitForKafkaRunning(timeoutMs = 120000): Promise { const endAt = Date.now() + timeoutMs; - const _waitForKafkaRunning = async (): Promise => { + const _waitForKafkaRunning = async (): Promise => { if (Date.now() > endAt) { throw new Error(`Failure to communicate with kafka after ${timeoutMs}ms`); } @@ -645,7 +678,7 @@ function waitForKafkaRunning(timeoutMs = 120000): Promise { 
} if (kafkaRunning) { - return true; + return; } await pDelay(3000); return _waitForKafkaRunning(); @@ -677,7 +710,7 @@ export async function k8sSetup(): Promise { logger.debug(subprocess.stdout); } -export async function deployK8sTeraslice() { +export async function deployK8sTeraslice(wait = false) { const e2eK8sDir = getE2eK8sDir(); if (!e2eK8sDir) { throw new Error('Missing k8s e2e test directory'); @@ -698,6 +731,9 @@ export async function deployK8sTeraslice() { /// Creates deployment for teraslice subprocess = await execa.command(`kubectl create -n ts-dev1 -f ${path.join(e2eK8sDir, 'masterDeployment.yaml')}`); logger.debug(subprocess.stdout); + if (wait) { + await waitForTerasliceRunning(); + } } catch (err) { logger.error('Error deploying Teraslice'); logger.error(err); @@ -705,17 +741,58 @@ export async function deployK8sTeraslice() { } } -export async function setAliasAndBaseAssets(hostIP: string) { - await setAlias(hostIP); +async function waitForTerasliceRunning() { + const startTime = Date.now(); + signale.pending('Waiting for Teraslice...'); + + await waitForTerasliceResponse(); + + const elapsed = Date.now() - startTime; + + signale.success('Teraslice is ready to go,', `took ${ms(elapsed)}`); +} + +function waitForTerasliceResponse(timeoutMs = 120000) { + const endAt = Date.now() + timeoutMs; + + const _waitForTerasliceRunning = async (): Promise => { + if (Date.now() > endAt) { + throw new Error(`Failure to communicate with teraslice after ${timeoutMs}ms`); + } + + let terasliceRunning = false; + try { + const kubectlResponse = await execa.command(`curl http://${config.HOST_IP}:${TS_PORT}`); + const response = JSON.parse(kubectlResponse.stdout); + if (response.clustering_type === 'kubernetes') { + terasliceRunning = true; + } + } catch (err) { + await pDelay(3000); + return _waitForTerasliceRunning(); + } + + if (terasliceRunning) { + return true; + } + await pDelay(3000); + return _waitForTerasliceRunning(); + }; + + return 
_waitForTerasliceRunning(); +} + +export async function setAliasAndBaseAssets() { + await setAlias(); await deployAssets('elasticsearch'); await deployAssets('standard'); await deployAssets('kafka'); } -async function setAlias(hostIP: string) { +async function setAlias() { let subprocess = await execa.command('earl aliases remove k8se2e 2> /dev/null || true', { shell: true }); logger.debug(subprocess.stdout); - subprocess = await execa.command(`earl aliases add k8se2e http://${hostIP}:45678`); + subprocess = await execa.command(`earl aliases add k8se2e http://${config.HOST_IP}:${TS_PORT}`); logger.debug(subprocess.stdout); } @@ -733,21 +810,21 @@ export async function deleteTerasliceNamespace() { } } -export async function showState(hostIP: string) { +export async function showState() { const subprocess = await execa.command('kubectl get deployments,po,svc --all-namespaces --show-labels -o wide'); logger.debug(subprocess.stdout); - await showESIndices(hostIP); - await showAssets(hostIP); + await showESIndices(); + await showAssets(); } -async function showESIndices(hostIP: string) { - const subprocess = await execa.command(`curl ${hostIP}:49200/_cat/indices?v`); +async function showESIndices() { + const subprocess = await execa.command(`curl ${config.HOST_IP}:${config.ELASTICSEARCH_PORT}/_cat/indices?v`); logger.debug(subprocess.stdout); } -async function showAssets(hostIP: string) { +async function showAssets() { try { - const subprocess = await execa.command(`curl ${hostIP}:45678/v1/assets`); + const subprocess = await execa.command(`curl ${config.HOST_IP}:${TS_PORT}/v1/assets`); logger.debug(subprocess.stdout); } catch (err) { logger.debug(err); diff --git a/packages/scripts/src/helpers/test-runner/services.ts b/packages/scripts/src/helpers/test-runner/services.ts index 6c3e450de92..5eda768f04e 100644 --- a/packages/scripts/src/helpers/test-runner/services.ts +++ b/packages/scripts/src/helpers/test-runner/services.ts @@ -214,7 +214,6 @@ export async function 
pullServices(suite: string, options: TestOptions): Promise export async function ensureServices(suite: string, options: TestOptions): Promise<() => void> { const launchServices = getServicesForSuite(suite); - const promises: Promise<(() => void)>[] = []; if (launchServices.includes(Service.Elasticsearch)) { @@ -746,7 +745,7 @@ async function startService(options: TestOptions, service: Service): Promise<() if (options.testPlatform === 'kubernetes') { await kindStopService(service); await kindLoadServiceImage(service, services[service].image, version); - await kindStartService(service); + await kindStartService(service, services[service].image, version); return () => { }; } diff --git a/packages/scripts/test/helpers-spec.ts b/packages/scripts/test/helpers-spec.ts index 65d6f20085d..403a4431c61 100644 --- a/packages/scripts/test/helpers-spec.ts +++ b/packages/scripts/test/helpers-spec.ts @@ -3,13 +3,14 @@ import * as index from '../src'; import * as config from '../src/helpers/config'; import { makeArray } from '../src/helpers/args'; import { getName } from '../src/helpers/misc'; +import { kafkaVersionMapper } from '../src/helpers/mapper'; describe('Helpers', () => { it('should export an object', () => { expect(index).toBeObject(); }); - it('should be able to have a conifg', () => { + it('should be able to have a config', () => { expect(config).toHaveProperty('HOST_IP'); expect(config).toHaveProperty('ELASTICSEARCH_HOST'); expect(config).toHaveProperty('KAFKA_BROKER'); @@ -42,4 +43,21 @@ describe('Helpers', () => { expect(getName('hi-there')).toEqual('Hi There'); expect(getName('hi there')).toEqual('Hi There'); }); + + describe('->kafkaVersionMapper', () => { + it('should throw error on invalid kafka versions', () => { + expect(() => kafkaVersionMapper('2.3')).toThrow(); + expect(() => kafkaVersionMapper('2.9')).toThrow(); + expect(() => kafkaVersionMapper('3')).toThrow(); + expect(() => kafkaVersionMapper('3.6')).toThrow(); + expect(() =>
kafkaVersionMapper('4.0')).toThrow(); + }); + + it('should be able to convert kafka versions to the proper confluent/cp-kafka versions', () => { + expect(kafkaVersionMapper('3.1')).toBe('7.1.9'); + expect(kafkaVersionMapper('3.5')).toBe('7.5.1'); + expect(kafkaVersionMapper('2.4')).toBe('5.4.10'); + expect(kafkaVersionMapper('2.8')).toBe('6.2.12'); + }); + }); });