From 5f84d1c88042efe381fff5989db642a5ec070dad Mon Sep 17 00:00:00 2001 From: Mostafa Moradian Date: Mon, 21 Nov 2022 15:18:11 +0100 Subject: [PATCH] Lint scripts, configs and docs (#178) * Run prettier on all the scripts * Run prettier on JS API docs * Run prettier on markdown and YAML files * Dry run prettier --- .github/workflows/build.yaml | 12 +- .github/workflows/test.yaml | 7 +- .golanci.yaml | 2 +- README.md | 417 +++++++-------- api-docs/docs/README.md | 16 +- api-docs/docs/classes/Connection.md | 18 +- api-docs/docs/classes/Reader.md | 12 +- api-docs/docs/classes/SchemaRegistry.md | 44 +- api-docs/docs/classes/Writer.md | 14 +- api-docs/docs/enums/BALANCERS.md | 38 +- api-docs/docs/enums/COMPRESSION_CODECS.md | 32 +- api-docs/docs/enums/ELEMENT_TYPES.md | 4 +- api-docs/docs/enums/GROUP_BALANCERS.md | 24 +- api-docs/docs/enums/ISOLATION_LEVEL.md | 16 +- api-docs/docs/enums/SASL_MECHANISMS.md | 34 +- api-docs/docs/enums/SCHEMA_TYPES.md | 40 +- api-docs/docs/enums/SUBJECT_NAME_STRATEGY.md | 24 +- api-docs/docs/enums/TLS_VERSIONS.md | 32 +- api-docs/docs/interfaces/BasicAuth.md | 2 +- api-docs/docs/interfaces/ConfigEntry.md | 2 +- api-docs/docs/interfaces/ConnectionConfig.md | 4 +- api-docs/docs/interfaces/Container.md | 4 +- api-docs/docs/interfaces/Message.md | 14 +- api-docs/docs/interfaces/ReaderConfig.md | 54 +- api-docs/docs/interfaces/ReplicaAssignment.md | 2 +- api-docs/docs/interfaces/SASLConfig.md | 4 +- api-docs/docs/interfaces/Schema.md | 4 +- .../docs/interfaces/SchemaRegistryConfig.md | 4 +- api-docs/docs/interfaces/SubjectNameConfig.md | 6 +- api-docs/docs/interfaces/TLSConfig.md | 10 +- api-docs/docs/interfaces/TopicConfig.md | 8 +- api-docs/docs/interfaces/WriterConfig.md | 26 +- api-docs/index.d.ts | 477 +++++++++--------- api-docs/package.json | 36 +- api-docs/tsconfig.json | 16 +- api-docs/typedoc.json | 16 +- scripts/test_avro_no_schema_registry.js | 104 ++-- scripts/test_avro_with_schema_registry.js | 174 +++---- scripts/test_bytes.js | 129 ++--- scripts/test_json.js | 228 +++++---- .../test_jsonschema_with_schema_registry.js | 214 ++++---- scripts/test_sasl_auth.js | 225 +++++---- scripts/test_string.js | 170 ++++--- scripts/test_topics.js | 14 +- 44 files changed, 1391 insertions(+), 1342 deletions(-) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 9de0138..ec9a8a5 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -19,7 +19,7 @@ jobs: if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: actions/setup-go@v3 with: - go-version: '1.18' + go-version: "1.18" - name: Install xk6 and build xk6-kafka for different platforms 🏗️ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') @@ -86,19 +86,19 @@ jobs: if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: aquasecurity/trivy-action@master with: - scan-type: 'fs' - format: 'sarif' - output: 'trivy-results.sarif' + scan-type: "fs" + format: "sarif" + output: "trivy-results.sarif" exit-code: 1 ignore-unfixed: true - severity: 'CRITICAL,HIGH' + severity: "CRITICAL,HIGH" skip-dirs: "fixtures" - name: Upload Trivy scan results to GitHub Security tab ⬆️ if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') uses: github/codeql-action/upload-sarif@v1 with: - sarif_file: 'trivy-results.sarif' + sarif_file: "trivy-results.sarif" - name: Scan Docker image with Syft and generate SBOM 🦉 if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') diff --git 
a/.github/workflows/test.yaml b/.github/workflows/test.yaml index b0ada81..bf0b218 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -21,7 +21,12 @@ jobs: - name: Install Go 🧑‍💻 uses: actions/setup-go@v3 with: - go-version: '1.18' + go-version: "1.18" + + - name: Run prettier for linting scripts, configs and docs 🧹 + uses: creyD/prettier_action@v4.2 + with: + dry: True - name: Lint code issues 🚨 uses: golangci/golangci-lint-action@v3 diff --git a/.golanci.yaml b/.golanci.yaml index 12cd00a..00de552 100644 --- a/.golanci.yaml +++ b/.golanci.yaml @@ -13,4 +13,4 @@ linters: - exhaustruct - gocognit - gochecknoinits - - gocyclo \ No newline at end of file + - gocyclo diff --git a/README.md b/README.md index d2fad81..09556a7 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ You can send many messages with each connection to Kafka. These messages are arr For debugging and testing purposes, a consumer is available to make sure you send the correct data to Kafka. -If you want to learn more about the extension, read the [article](https://k6.io/blog/load-test-your-kafka-producers-and-consumers-using-k6/) (outdated) explaining how to load test your Kafka producers and consumers using k6 on the k6 blog. You can also watch [this recording](https://www.youtube.com/watch?v=NQ0fyhq1mxo) of the k6 Office Hours about this extension. +If you want to learn more about the extension, read the [article](https://k6.io/blog/load-test-your-kafka-producers-and-consumers-using-k6/) (outdated) explaining how to load test your Kafka producers and consumers using k6 on the k6 blog. You can also watch [this recording](https://www.youtube.com/watch?v=NQ0fyhq1mxo) of the k6 Office Hours about this extension. ## Supported Features @@ -64,15 +64,15 @@ Feel free to skip the first two steps if you already have Go installed. 2. Install the latest version of Go using gvm. You need Go 1.4 installed for bootstrapping into higher Go versions, as explained [here](https://github.com/moovweb/gvm#a-note-on-compiling-go-15). 3. Install `xk6`: - ```shell - go install go.k6.io/xk6/cmd/xk6@latest - ``` + ```shell + go install go.k6.io/xk6/cmd/xk6@latest + ``` 4. Build the binary: - ```shell - xk6 build --with github.com/mostafa/xk6-kafka@latest - ``` + ```shell + xk6 build --with github.com/mostafa/xk6-kafka@latest + ``` > **Note** > You can always use the latest version of k6 to build the extension, but the earliest version of k6 that supports extensions via xk6 is v0.32.0. The xk6 is constantly evolving, so some APIs may not be backward compatible. @@ -100,34 +100,34 @@ I recommend the [fast-data-dev](https://github.com/lensesio/fast-data-dev) Docke 1. Run the Kafka environment and expose the ports: - ```bash - sudo docker run \ - --detach --rm \ - --name lensesio \ - -p 2181:2181 \ - -p 3030:3030 \ - -p 8081-8083:8081-8083 \ - -p 9581-9585:9581-9585 \ - -p 9092:9092 \ - -e ADV_HOST=127.0.0.1 \ - -e RUN_TESTS=0 \ - lensesio/fast-data-dev:latest - ``` + ```bash + sudo docker run \ + --detach --rm \ + --name lensesio \ + -p 2181:2181 \ + -p 3030:3030 \ + -p 8081-8083:8081-8083 \ + -p 9581-9585:9581-9585 \ + -p 9092:9092 \ + -e ADV_HOST=127.0.0.1 \ + -e RUN_TESTS=0 \ + lensesio/fast-data-dev:latest + ``` 2. After running the command, visit [localhost:3030](http://localhost:3030) to get into the fast-data-dev environment. 3. 
You can run the command to see the container logs: - ```bash - sudo docker logs -f -t lensesio - ``` + ```bash + sudo docker logs -f -t lensesio + ``` > **Note:** > If you have errors running the Kafka development environment, refer to the [fast-data-dev documentation](https://github.com/lensesio/fast-data-dev). ### The xk6-kafka API -All the exported functions are available by importing the module object from `k6/x/kafka`. The exported objects, constants and other data structures are available in the [`index.d.ts`](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts) file, and they always reflect the *latest* changes on the `main` branch. You can access the generated documentation at [`api-docs/docs/README.md`](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/docs/README.md). +All the exported functions are available by importing the module object from `k6/x/kafka`. The exported objects, constants and other data structures are available in the [`index.d.ts`](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts) file, and they always reflect the _latest_ changes on the `main` branch. You can access the generated documentation at [`api-docs/docs/README.md`](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/docs/README.md). > ⚠️ **Warning:** > The Javascript API is subject to change in future versions unless a new major version is released. @@ -138,216 +138,221 @@ The example scripts are available as `test_.js` with more code a 1. To use the extension, you need to import it in your script, like any other JS module: - ```javascript - // Either import the module object - import * as kafka from "k6/x/kafka"; + ```javascript + // Either import the module object + import * as kafka from "k6/x/kafka"; - // Or individual classes and constants - import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_STRING } from "k6/x/kafka"; - ``` + // Or individual classes and constants + import { + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_STRING, + } from "k6/x/kafka"; + ``` 2. You need to instantiate the classes in the `init` context. 
All the [k6 options](https://k6.io/docs/using-k6/k6-options/) are also configured here: - ```javascript - // Creates a new Writer object to produce messages to Kafka - const writer = new Writer({ - // WriterConfig object - brokers: ["localhost:9092"], - topic: "my-topic", - }); - - const reader = new Reader({ - // ReaderConfig object - brokers: ["localhost:9092"], - topic: "my-topic", - }); - - const connection = new Connection({ - // ConnectionConfig object - address: "localhost:9092", - }); - - const schemaRegistry = new SchemaRegistry( - // Can accept a SchemaRegistryConfig object - ) - - if (__VU == 0) { - // Create a topic on initialization (before producing messages) - connection.createTopic({ - // TopicConfig object - topic: "my-topic", - }); - } - ``` + ```javascript + // Creates a new Writer object to produce messages to Kafka + const writer = new Writer({ + // WriterConfig object + brokers: ["localhost:9092"], + topic: "my-topic", + }); + + const reader = new Reader({ + // ReaderConfig object + brokers: ["localhost:9092"], + topic: "my-topic", + }); + + const connection = new Connection({ + // ConnectionConfig object + address: "localhost:9092", + }); + + const schemaRegistry = new SchemaRegistry(); + // Can accept a SchemaRegistryConfig object + + if (__VU == 0) { + // Create a topic on initialization (before producing messages) + connection.createTopic({ + // TopicConfig object + topic: "my-topic", + }); + } + ``` 3. In the VU code, you can produce messages to Kafka or consume messages from it: - ```javascript - export default function() { - // Fetch the list of all topics - const topics = connection.listTopics(); - console.log(topics); // list of topics - - // Produces message to Kafka - writer.produce({ - // ProduceConfig object - messages: [ - // Message object(s) - { - key: schemaRegistry.serialize({ - data: "my-key", - schemaType: SCHEMA_TYPE_STRING, - }), - value: schemaRegistry.serialize({ - data: "my-value", - schemaType: SCHEMA_TYPE_STRING, - }), - }, - ], - }); - - // Consume messages from Kafka - let messages = reader.consume({ - // ConsumeConfig object - limit: 10 - }); - - // your messages - console.log(messages); - - // You can use checks to verify the contents, - // length and other properties of the message(s) - - // To serialize the data back into a string, you should use - // the deserialize method of the Schema Registry client. You - // can use it inside a check, as shown in the example scripts. - let deserializedValue = schemaRegistry.deserialize({ - data: messages[0].value, - schemaType: SCHEMA_TYPE_STRING, - }) - } - ``` + ```javascript + export default function () { + // Fetch the list of all topics + const topics = connection.listTopics(); + console.log(topics); // list of topics + + // Produces message to Kafka + writer.produce({ + // ProduceConfig object + messages: [ + // Message object(s) + { + key: schemaRegistry.serialize({ + data: "my-key", + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: "my-value", + schemaType: SCHEMA_TYPE_STRING, + }), + }, + ], + }); + + // Consume messages from Kafka + let messages = reader.consume({ + // ConsumeConfig object + limit: 10, + }); + + // your messages + console.log(messages); + + // You can use checks to verify the contents, + // length and other properties of the message(s) + + // To serialize the data back into a string, you should use + // the deserialize method of the Schema Registry client. You + // can use it inside a check, as shown in the example scripts. 
+ let deserializedValue = schemaRegistry.deserialize({ + data: messages[0].value, + schemaType: SCHEMA_TYPE_STRING, + }); + } + ``` 4. In the `teardown` function, close all the connections and possibly delete the topic: - ```javascript - export function teardown(data) { - // Delete the topic - connection.deleteTopic("my-topic"); + ```javascript + export function teardown(data) { + // Delete the topic + connection.deleteTopic("my-topic"); - // Close all connections - writer.close(); - reader.close(); - connection.close(); - } - ``` + // Close all connections + writer.close(); + reader.close(); + connection.close(); + } + ``` 5. You can now run k6 with the extension using the following command: - ```bash - ./k6 run --vus 50 --duration 60s scripts/test_json.js - ``` + ```bash + ./k6 run --vus 50 --duration 60s scripts/test_json.js + ``` 6. And here's the test result output: - ```bash - - /\ |‾‾| /‾‾/ /‾‾/ - /\ / \ | |/ / / / - / \/ \ | ( / ‾‾\ - / \ | |\ \ | (‾) | - / __________ \ |__| \__\ \_____/ .io - - execution: local - script: scripts/test_json.js - output: - - - scenarios: (100.00%) 1 scenario, 50 max VUs, 1m30s max duration (incl. graceful stop): - * default: 50 looping VUs for 1m0s (gracefulStop: 30s) - - - running (1m04.4s), 00/50 VUs, 20170 complete and 0 interrupted iterations - default ✓ [======================================] 50 VUs 1m0s - - ✓ 10 messages are received - ✓ Topic equals to xk6_kafka_json_topic - ✓ Key contains key/value and is JSON - ✓ Value contains key/value and is JSON - ✓ Header equals {'mykey': 'myvalue'} - ✓ Time is past - ✓ Partition is zero - ✓ Offset is gte zero - ✓ High watermark is gte zero - - █ teardown - - checks.........................: 100.00% ✓ 181530 ✗ 0 - data_received..................: 0 B 0 B/s - data_sent......................: 0 B 0 B/s - iteration_duration.............: avg=153.45ms min=6.01ms med=26.8ms max=8.14s p(90)=156.3ms p(95)=206.4ms - iterations.....................: 20170 313.068545/s - kafka.reader.dial.count........: 50 0.776075/s - kafka.reader.dial.seconds......: avg=171.22µs min=0s med=0s max=1.09s p(90)=0s p(95)=0s - ✓ kafka.reader.error.count.......: 0 0/s - kafka.reader.fetch_bytes.max...: 1000000 min=1000000 max=1000000 - kafka.reader.fetch_bytes.min...: 1 min=1 max=1 - kafka.reader.fetch_wait.max....: 200ms min=200ms max=200ms - kafka.reader.fetch.bytes.......: 58 MB 897 kB/s - kafka.reader.fetch.size........: 147167 2284.25179/s - kafka.reader.fetches.count.....: 107 1.6608/s - kafka.reader.lag...............: 1519055 min=0 max=2436190 - kafka.reader.message.bytes.....: 40 MB 615 kB/s - kafka.reader.message.count.....: 201749 3131.446006/s - kafka.reader.offset............: 4130 min=11 max=5130 - kafka.reader.queue.capacity....: 1 min=1 max=1 - kafka.reader.queue.length......: 1 min=0 max=1 - kafka.reader.read.seconds......: avg=96.5ms min=0s med=0s max=59.37s p(90)=0s p(95)=0s - kafka.reader.rebalance.count...: 0 0/s - kafka.reader.timeouts.count....: 57 0.884725/s - kafka.reader.wait.seconds......: avg=102.71µs min=0s med=0s max=85.71ms p(90)=0s p(95)=0s - kafka.writer.acks.required.....: 0 min=0 max=0 - kafka.writer.async.............: 0.00% ✓ 0 ✗ 2017000 - kafka.writer.attempts.max......: 0 min=0 max=0 - kafka.writer.batch.bytes.......: 441 MB 6.8 MB/s - kafka.writer.batch.max.........: 1 min=1 max=1 - kafka.writer.batch.size........: 2017000 31306.854525/s - kafka.writer.batch.timeout.....: 0s min=0s max=0s - ✓ kafka.writer.error.count.......: 0 0/s - kafka.writer.message.bytes.....: 883 MB 14 MB/s - 
kafka.writer.message.count.....: 4034000 62613.709051/s - kafka.writer.read.timeout......: 0s min=0s max=0s - kafka.writer.retries.count.....: 0 0/s - kafka.writer.wait.seconds......: avg=0s min=0s med=0s max=0s p(90)=0s p(95)=0s - kafka.writer.write.count.......: 4034000 62613.709051/s - kafka.writer.write.seconds.....: avg=523.21µs min=4.84µs med=14.48µs max=4.05s p(90)=33.85µs p(95)=42.68µs - kafka.writer.write.timeout.....: 0s min=0s max=0s - vus............................: 7 min=7 max=50 - vus_max........................: 50 min=50 max=50 - ``` + ```bash + + /\ |‾‾| /‾‾/ /‾‾/ + /\ / \ | |/ / / / + / \/ \ | ( / ‾‾\ + / \ | |\ \ | (‾) | + / __________ \ |__| \__\ \_____/ .io + + execution: local + script: scripts/test_json.js + output: - + + scenarios: (100.00%) 1 scenario, 50 max VUs, 1m30s max duration (incl. graceful stop): + * default: 50 looping VUs for 1m0s (gracefulStop: 30s) + + + running (1m04.4s), 00/50 VUs, 20170 complete and 0 interrupted iterations + default ✓ [======================================] 50 VUs 1m0s + + ✓ 10 messages are received + ✓ Topic equals to xk6_kafka_json_topic + ✓ Key contains key/value and is JSON + ✓ Value contains key/value and is JSON + ✓ Header equals {'mykey': 'myvalue'} + ✓ Time is past + ✓ Partition is zero + ✓ Offset is gte zero + ✓ High watermark is gte zero + + █ teardown + + checks.........................: 100.00% ✓ 181530 ✗ 0 + data_received..................: 0 B 0 B/s + data_sent......................: 0 B 0 B/s + iteration_duration.............: avg=153.45ms min=6.01ms med=26.8ms max=8.14s p(90)=156.3ms p(95)=206.4ms + iterations.....................: 20170 313.068545/s + kafka.reader.dial.count........: 50 0.776075/s + kafka.reader.dial.seconds......: avg=171.22µs min=0s med=0s max=1.09s p(90)=0s p(95)=0s + ✓ kafka.reader.error.count.......: 0 0/s + kafka.reader.fetch_bytes.max...: 1000000 min=1000000 max=1000000 + kafka.reader.fetch_bytes.min...: 1 min=1 max=1 + kafka.reader.fetch_wait.max....: 200ms min=200ms max=200ms + kafka.reader.fetch.bytes.......: 58 MB 897 kB/s + kafka.reader.fetch.size........: 147167 2284.25179/s + kafka.reader.fetches.count.....: 107 1.6608/s + kafka.reader.lag...............: 1519055 min=0 max=2436190 + kafka.reader.message.bytes.....: 40 MB 615 kB/s + kafka.reader.message.count.....: 201749 3131.446006/s + kafka.reader.offset............: 4130 min=11 max=5130 + kafka.reader.queue.capacity....: 1 min=1 max=1 + kafka.reader.queue.length......: 1 min=0 max=1 + kafka.reader.read.seconds......: avg=96.5ms min=0s med=0s max=59.37s p(90)=0s p(95)=0s + kafka.reader.rebalance.count...: 0 0/s + kafka.reader.timeouts.count....: 57 0.884725/s + kafka.reader.wait.seconds......: avg=102.71µs min=0s med=0s max=85.71ms p(90)=0s p(95)=0s + kafka.writer.acks.required.....: 0 min=0 max=0 + kafka.writer.async.............: 0.00% ✓ 0 ✗ 2017000 + kafka.writer.attempts.max......: 0 min=0 max=0 + kafka.writer.batch.bytes.......: 441 MB 6.8 MB/s + kafka.writer.batch.max.........: 1 min=1 max=1 + kafka.writer.batch.size........: 2017000 31306.854525/s + kafka.writer.batch.timeout.....: 0s min=0s max=0s + ✓ kafka.writer.error.count.......: 0 0/s + kafka.writer.message.bytes.....: 883 MB 14 MB/s + kafka.writer.message.count.....: 4034000 62613.709051/s + kafka.writer.read.timeout......: 0s min=0s max=0s + kafka.writer.retries.count.....: 0 0/s + kafka.writer.wait.seconds......: avg=0s min=0s med=0s max=0s p(90)=0s p(95)=0s + kafka.writer.write.count.......: 4034000 62613.709051/s + kafka.writer.write.seconds.....: avg=523.21µs 
min=4.84µs med=14.48µs max=4.05s p(90)=33.85µs p(95)=42.68µs + kafka.writer.write.timeout.....: 0s min=0s max=0s + vus............................: 7 min=7 max=50 + vus_max........................: 50 min=50 max=50 + ``` ### FAQ 1. Why do I receive `Error writing messages`? - There are a few reasons why this might happen. The most prominent one is that the topic might not exist, which causes the producer to fail to send messages to a non-existent topic. You can use `Connection.createTopic` method to create the topic in Kafka, as shown in `scripts/test_topics.js`. You can also set the `autoCreateTopic` on the `WriterConfig`. You can also create a topic using the `kafka-topics` command: + There are a few reasons why this might happen. The most prominent one is that the topic might not exist, which causes the producer to fail to send messages to a non-existent topic. You can use `Connection.createTopic` method to create the topic in Kafka, as shown in `scripts/test_topics.js`. You can also set the `autoCreateTopic` on the `WriterConfig`. You can also create a topic using the `kafka-topics` command: - ```bash - $ docker exec -it lensesio bash - (inside container)$ kafka-topics --create --topic xk6_kafka_avro_topic --bootstrap-server localhost:9092 - (inside container)$ kafka-topics --create --topic xk6_kafka_json_topic --bootstrap-server localhost:9092 - ``` + ```bash + $ docker exec -it lensesio bash + (inside container)$ kafka-topics --create --topic xk6_kafka_avro_topic --bootstrap-server localhost:9092 + (inside container)$ kafka-topics --create --topic xk6_kafka_json_topic --bootstrap-server localhost:9092 + ``` 2. Why does the `reader.consume` keeps hanging? - If the `reader.consume` keeps hanging, it might be because the topic doesn't exist or is empty. + If the `reader.consume` keeps hanging, it might be because the topic doesn't exist or is empty. 3. I want to test SASL authentication. How should I do that? - If you want to test SASL authentication, look at [this commit message](https://github.com/mostafa/xk6-kafka/pull/3/commits/403fbc48d13683d836b8033eeeefa48bf2f25c6e), in which I describe how to run a test environment to test SASL authentication. + If you want to test SASL authentication, look at [this commit message](https://github.com/mostafa/xk6-kafka/pull/3/commits/403fbc48d13683d836b8033eeeefa48bf2f25c6e), in which I describe how to run a test environment to test SASL authentication. 4. Why doesn't the consumer group consuming messages from the topic? - As explained in issue [#37](https://github.com/mostafa/xk6-kafka/issues/37), multiple inits by k6 causes multiple consumer group instances to be created in the init context, which sometimes causes the random partitions to be selected by each instance. This, in turn, causes confusion when consuming messages from different partitions. This can be solved by using a UUID when naming the consumer group, thereby guaranteeing that the consumer group object was assigned to all partitions in a topic. + As explained in issue [#37](https://github.com/mostafa/xk6-kafka/issues/37), multiple inits by k6 causes multiple consumer group instances to be created in the init context, which sometimes causes the random partitions to be selected by each instance. This, in turn, causes confusion when consuming messages from different partitions. This can be solved by using a UUID when naming the consumer group, thereby guaranteeing that the consumer group object was assigned to all partitions in a topic. 
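For illustration, here is a minimal sketch of the consumer-group workaround described in the last FAQ item above. It assumes the `uuidv4` helper from the k6 `k6-utils` jslib (not part of xk6-kafka) and reuses the example broker and topic names from this README; it is a sketch under those assumptions, not the project's prescribed recipe:

```javascript
import { uuidv4 } from "https://jslib.k6.io/k6-utils/1.2.0/index.js";
import { Reader } from "k6/x/kafka";

// Naming the consumer group with a UUID in the init context gives each
// Reader instance its own group, so that group is assigned all partitions
// of the topic instead of racing other instances for them (see issue #37).
const reader = new Reader({
  brokers: ["localhost:9092"],
  topic: "xk6_kafka_json_topic",
  groupID: `xk6-kafka-${uuidv4()}`,
});
```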
## Contributions, Issues and Feedback @@ -355,13 +360,13 @@ I'd be thrilled to receive contributions and feedback on this project. You're al ## Backward Compatibility Notice -If you want to keep up to date with the latest changes, please follow the [project board](https://github.com/users/mostafa/projects/1). Also, since [v0.9.0](https://github.com/mostafa/xk6-kafka/releases/tag/v0.9.0), the `main` branch is the *development* branch and usually has the latest changes and might be unstable. If you want to use the latest features, you might need to build your binary by following the [build from source](https://github.com/mostafa/xk6-kafka/blob/main/README.md#build-from-source) instructions. In turn, the tagged releases and the Docker images are more stable. +If you want to keep up to date with the latest changes, please follow the [project board](https://github.com/users/mostafa/projects/1). Also, since [v0.9.0](https://github.com/mostafa/xk6-kafka/releases/tag/v0.9.0), the `main` branch is the _development_ branch and usually has the latest changes and might be unstable. If you want to use the latest features, you might need to build your binary by following the [build from source](https://github.com/mostafa/xk6-kafka/blob/main/README.md#build-from-source) instructions. In turn, the tagged releases and the Docker images are more stable. -I make no guarantee to keep the API stable, as this project is in *active development* unless I release a major version. The best way to keep up with the changes is to follow [the xk6-kafka API](https://github.com/mostafa/xk6-kafka/blob/main/README.md#the-xk6-kafka-api) and look at the [scripts](https://github.com/mostafa/xk6-kafka/blob/main/scripts/) directory. +I make no guarantee to keep the API stable, as this project is in _active development_ unless I release a major version. The best way to keep up with the changes is to follow [the xk6-kafka API](https://github.com/mostafa/xk6-kafka/blob/main/README.md#the-xk6-kafka-api) and look at the [scripts](https://github.com/mostafa/xk6-kafka/blob/main/scripts/) directory. ## The Release Process -The `main` branch is the *development* branch, and the pull requests will be *squashed and merged* into the `main` branch. When a commit is tagged with a version, for example, `v0.10.0`, the build pipeline will build the `main` branch on that commit. The build process creates the binaries and the Docker image. If you want to test the latest unreleased features, you can clone the `main` branch and instruct the `xk6` to use the locally cloned repository instead of using the `@latest`, which refers to the latest tagged version, as explained in the [build for development](https://github.com/mostafa/xk6-kafka/blob/main/README.md#build-for-development) section. +The `main` branch is the _development_ branch, and the pull requests will be _squashed and merged_ into the `main` branch. When a commit is tagged with a version, for example, `v0.10.0`, the build pipeline will build the `main` branch on that commit. The build process creates the binaries and the Docker image. If you want to test the latest unreleased features, you can clone the `main` branch and instruct the `xk6` to use the locally cloned repository instead of using the `@latest`, which refers to the latest tagged version, as explained in the [build for development](https://github.com/mostafa/xk6-kafka/blob/main/README.md#build-for-development) section. 
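As a rough sketch of that local-build workflow (the clone path below is only an example; the build-for-development section linked above remains the authoritative reference):

```bash
# Clone the development branch and build k6 against the local checkout
# instead of the latest tagged release.
git clone https://github.com/mostafa/xk6-kafka.git
cd xk6-kafka
xk6 build --with github.com/mostafa/xk6-kafka=.
```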
## The CycloneDX SBOM @@ -369,6 +374,6 @@ CycloneDX SBOMs in JSON format are generated for [go.mod](go.mod) (as of [v0.9.0 ## Disclaimer -This project *was* a proof of concept but seems to be used by some companies nowadays. However, it isn't supported by the k6 team, but rather by [me](https://github.com/mostafa) personally, and the APIs may change in the future. USE AT YOUR OWN RISK! +This project _was_ a proof of concept but seems to be used by some companies nowadays. However, it isn't supported by the k6 team, but rather by [me](https://github.com/mostafa) personally, and the APIs may change in the future. USE AT YOUR OWN RISK! This project was AGPL3-licensed up until 7 October 2021, and then we [relicensed](https://github.com/mostafa/xk6-kafka/pull/25) it under the [Apache License 2.0](https://github.com/mostafa/xk6-kafka/blob/master/LICENSE). diff --git a/api-docs/docs/README.md b/api-docs/docs/README.md index 4648d43..a06959d 100644 --- a/api-docs/docs/README.md +++ b/api-docs/docs/README.md @@ -11,14 +11,14 @@ This documentation refers to the development version of the xk6-kafka project, w ### Enumerations - [BALANCERS](enums/BALANCERS.md) -- [COMPRESSION\_CODECS](enums/COMPRESSION_CODECS.md) -- [ELEMENT\_TYPES](enums/ELEMENT_TYPES.md) -- [GROUP\_BALANCERS](enums/GROUP_BALANCERS.md) -- [ISOLATION\_LEVEL](enums/ISOLATION_LEVEL.md) -- [SASL\_MECHANISMS](enums/SASL_MECHANISMS.md) -- [SCHEMA\_TYPES](enums/SCHEMA_TYPES.md) -- [SUBJECT\_NAME\_STRATEGY](enums/SUBJECT_NAME_STRATEGY.md) -- [TLS\_VERSIONS](enums/TLS_VERSIONS.md) +- [COMPRESSION_CODECS](enums/COMPRESSION_CODECS.md) +- [ELEMENT_TYPES](enums/ELEMENT_TYPES.md) +- [GROUP_BALANCERS](enums/GROUP_BALANCERS.md) +- [ISOLATION_LEVEL](enums/ISOLATION_LEVEL.md) +- [SASL_MECHANISMS](enums/SASL_MECHANISMS.md) +- [SCHEMA_TYPES](enums/SCHEMA_TYPES.md) +- [SUBJECT_NAME_STRATEGY](enums/SUBJECT_NAME_STRATEGY.md) +- [TLS_VERSIONS](enums/TLS_VERSIONS.md) ### Classes diff --git a/api-docs/docs/classes/Connection.md b/api-docs/docs/classes/Connection.md index a36f021..2ca193c 100644 --- a/api-docs/docs/classes/Connection.md +++ b/api-docs/docs/classes/Connection.md @@ -38,8 +38,8 @@ connection.close(); #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :----------------- | :------------------------------------------------------ | :------------------------ | | `connectionConfig` | [`ConnectionConfig`](../interfaces/ConnectionConfig.md) | Connection configuration. | #### Defined in @@ -66,7 +66,7 @@ connection.close(); [index.d.ts:379](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L379) -___ +--- ### createTopic @@ -77,8 +77,8 @@ Create a new topic. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------------ | :-------------------------------------------- | :------------------- | | `topicConfig` | [`TopicConfig`](../interfaces/TopicConfig.md) | Topic configuration. | #### Returns @@ -91,7 +91,7 @@ Create a new topic. [index.d.ts:360](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L360) -___ +--- ### deleteTopic @@ -102,8 +102,8 @@ Delete a topic. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------ | :------- | :---------- | | `topic` | `string` | Topic name. | #### Returns @@ -116,7 +116,7 @@ Delete a topic. 
[index.d.ts:367](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L367) -___ +--- ### listTopics diff --git a/api-docs/docs/classes/Reader.md b/api-docs/docs/classes/Reader.md index 7899917..1f9928b 100644 --- a/api-docs/docs/classes/Reader.md +++ b/api-docs/docs/classes/Reader.md @@ -12,7 +12,7 @@ const reader = new Reader({ }); // In VU code (default function) -const messages = reader.consume({limit: 10}); +const messages = reader.consume({ limit: 10 }); // In teardown function reader.close(); @@ -37,8 +37,8 @@ reader.close(); #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------------- | :---------------------------------------------- | :-------------------- | | `readerConfig` | [`ReaderConfig`](../interfaces/ReaderConfig.md) | Reader configuration. | #### Defined in @@ -65,7 +65,7 @@ reader.close(); [index.d.ts:325](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L325) -___ +--- ### consume @@ -76,8 +76,8 @@ Read messages from Kafka. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :-------------- | :------------------------------------------------ | :--------------------- | | `consumeConfig` | [`ConsumeConfig`](../interfaces/ConsumeConfig.md) | Consume configuration. | #### Returns diff --git a/api-docs/docs/classes/SchemaRegistry.md b/api-docs/docs/classes/SchemaRegistry.md index 3954a70..3f44fc2 100644 --- a/api-docs/docs/classes/SchemaRegistry.md +++ b/api-docs/docs/classes/SchemaRegistry.md @@ -21,7 +21,7 @@ const keySchema = schemaRegistry.createSchema({ element: KEY, subject: "...", schema: "...", - schemaType: "AVRO" + schemaType: "AVRO", }); const valueSchema = schemaRegistry.createSchema({ @@ -29,7 +29,7 @@ const valueSchema = schemaRegistry.createSchema({ element: VALUE, subject: "...", schema: "...", - schemaType: "AVRO" + schemaType: "AVRO", }); // In VU code (default function) @@ -39,15 +39,15 @@ writer.produce({ key: schemaRegistry.serialize({ data: "key", schema: keySchema, - schemaType: SCHEMA_TYPE_AVRO + schemaType: SCHEMA_TYPE_AVRO, }), value: schemaRegistry.serialize({ data: "value", schema: valueSchema, - schemaType: SCHEMA_TYPE_AVRO + schemaType: SCHEMA_TYPE_AVRO, }), - } - ] + }, + ], }); ``` @@ -73,8 +73,8 @@ writer.produce({ #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :--------------------- | :-------------------------------------------------------------- | :----------------------------- | | `schemaRegistryConfig` | [`SchemaRegistryConfig`](../interfaces/SchemaRegistryConfig.md) | Schema Registry configuration. | #### Defined in @@ -92,8 +92,8 @@ Create or update a schema on Schema Registry. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------- | :---------------------------------- | :-------------------- | | `schema` | [`Schema`](../interfaces/Schema.md) | Schema configuration. | #### Returns @@ -106,7 +106,7 @@ Create or update a schema on Schema Registry. [index.d.ts:456](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L456) -___ +--- ### deserialize @@ -117,8 +117,8 @@ Deserializes the given data and schema into its original form. 
#### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :---------- | :---------------------------------------- | :----------------------------------------------- | | `container` | [`Container`](../interfaces/Container.md) | Container including data, schema and schemaType. | #### Returns @@ -131,7 +131,7 @@ Deserializes the given data and schema into its original form. [index.d.ts:477](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L477) -___ +--- ### getSchema @@ -142,8 +142,8 @@ Get a schema from Schema Registry by version and subject. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------- | :---------------------------------- | :-------------------- | | `schema` | [`Schema`](../interfaces/Schema.md) | Schema configuration. | #### Returns @@ -156,7 +156,7 @@ Get a schema from Schema Registry by version and subject. [index.d.ts:449](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L449) -___ +--- ### getSubjectName @@ -167,8 +167,8 @@ Returns the subject name for the given SubjectNameConfig. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------------------ | :-------------------------------------------------------- | :-------------------------- | | `subjectNameConfig` | [`SubjectNameConfig`](../interfaces/SubjectNameConfig.md) | Subject name configuration. | #### Returns @@ -181,7 +181,7 @@ Returns the subject name for the given SubjectNameConfig. [index.d.ts:463](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L463) -___ +--- ### serialize @@ -192,8 +192,8 @@ Serializes the given data and schema into a byte array. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :---------- | :---------------------------------------- | :----------------------------------------------- | | `container` | [`Container`](../interfaces/Container.md) | Container including data, schema and schemaType. | #### Returns diff --git a/api-docs/docs/classes/Writer.md b/api-docs/docs/classes/Writer.md index 3a496f6..fc7e5d8 100644 --- a/api-docs/docs/classes/Writer.md +++ b/api-docs/docs/classes/Writer.md @@ -18,8 +18,8 @@ writer.produce({ { key: "key", value: "value", - } - ] + }, + ], }); // In teardown function @@ -45,8 +45,8 @@ writer.close(); #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :------------- | :---------------------------------------------- | :-------------------- | | `writerConfig` | [`WriterConfig`](../interfaces/WriterConfig.md) | Writer configuration. | #### Defined in @@ -73,7 +73,7 @@ writer.close(); [index.d.ts:283](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L283) -___ +--- ### produce @@ -84,8 +84,8 @@ Write messages to Kafka. #### Parameters -| Name | Type | Description | -| :------ | :------ | :------ | +| Name | Type | Description | +| :-------------- | :------------------------------------------------ | :--------------------- | | `produceConfig` | [`ProduceConfig`](../interfaces/ProduceConfig.md) | Produce configuration. 
| #### Returns diff --git a/api-docs/docs/enums/BALANCERS.md b/api-docs/docs/enums/BALANCERS.md index b872eee..69cd4e2 100644 --- a/api-docs/docs/enums/BALANCERS.md +++ b/api-docs/docs/enums/BALANCERS.md @@ -4,57 +4,57 @@ ### Enumeration Members -- [BALANCER\_CRC32](BALANCERS.md#balancer_crc32) -- [BALANCER\_HASH](BALANCERS.md#balancer_hash) -- [BALANCER\_LEAST\_BYTES](BALANCERS.md#balancer_least_bytes) -- [BALANCER\_MURMUR2](BALANCERS.md#balancer_murmur2) -- [BALANCER\_ROUND\_ROBIN](BALANCERS.md#balancer_round_robin) +- [BALANCER_CRC32](BALANCERS.md#balancer_crc32) +- [BALANCER_HASH](BALANCERS.md#balancer_hash) +- [BALANCER_LEAST_BYTES](BALANCERS.md#balancer_least_bytes) +- [BALANCER_MURMUR2](BALANCERS.md#balancer_murmur2) +- [BALANCER_ROUND_ROBIN](BALANCERS.md#balancer_round_robin) ## Enumeration Members -### BALANCER\_CRC32 +### BALANCER_CRC32 -• **BALANCER\_CRC32** +• **BALANCER_CRC32** #### Defined in [index.d.ts:63](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L63) -___ +--- -### BALANCER\_HASH +### BALANCER_HASH -• **BALANCER\_HASH** +• **BALANCER_HASH** #### Defined in [index.d.ts:62](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L62) -___ +--- -### BALANCER\_LEAST\_BYTES +### BALANCER_LEAST_BYTES -• **BALANCER\_LEAST\_BYTES** +• **BALANCER_LEAST_BYTES** #### Defined in [index.d.ts:61](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L61) -___ +--- -### BALANCER\_MURMUR2 +### BALANCER_MURMUR2 -• **BALANCER\_MURMUR2** +• **BALANCER_MURMUR2** #### Defined in [index.d.ts:64](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L64) -___ +--- -### BALANCER\_ROUND\_ROBIN +### BALANCER_ROUND_ROBIN -• **BALANCER\_ROUND\_ROBIN** +• **BALANCER_ROUND_ROBIN** #### Defined in diff --git a/api-docs/docs/enums/COMPRESSION_CODECS.md b/api-docs/docs/enums/COMPRESSION_CODECS.md index 935c2e8..c77ce68 100644 --- a/api-docs/docs/enums/COMPRESSION_CODECS.md +++ b/api-docs/docs/enums/COMPRESSION_CODECS.md @@ -1,4 +1,4 @@ -# Enumeration: COMPRESSION\_CODECS +# Enumeration: COMPRESSION_CODECS Compression codecs for compressing messages when producing to a topic or reading from it. 
@@ -6,46 +6,46 @@ Compression codecs for compressing messages when producing to a topic or reading ### Enumeration Members -- [CODEC\_GZIP](COMPRESSION_CODECS.md#codec_gzip) -- [CODEC\_LZ4](COMPRESSION_CODECS.md#codec_lz4) -- [CODEC\_SNAPPY](COMPRESSION_CODECS.md#codec_snappy) -- [CODEC\_ZSTD](COMPRESSION_CODECS.md#codec_zstd) +- [CODEC_GZIP](COMPRESSION_CODECS.md#codec_gzip) +- [CODEC_LZ4](COMPRESSION_CODECS.md#codec_lz4) +- [CODEC_SNAPPY](COMPRESSION_CODECS.md#codec_snappy) +- [CODEC_ZSTD](COMPRESSION_CODECS.md#codec_zstd) ## Enumeration Members -### CODEC\_GZIP +### CODEC_GZIP -• **CODEC\_GZIP** +• **CODEC_GZIP** #### Defined in [index.d.ts:16](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L16) -___ +--- -### CODEC\_LZ4 +### CODEC_LZ4 -• **CODEC\_LZ4** +• **CODEC_LZ4** #### Defined in [index.d.ts:18](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L18) -___ +--- -### CODEC\_SNAPPY +### CODEC_SNAPPY -• **CODEC\_SNAPPY** +• **CODEC_SNAPPY** #### Defined in [index.d.ts:17](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L17) -___ +--- -### CODEC\_ZSTD +### CODEC_ZSTD -• **CODEC\_ZSTD** +• **CODEC_ZSTD** #### Defined in diff --git a/api-docs/docs/enums/ELEMENT_TYPES.md b/api-docs/docs/enums/ELEMENT_TYPES.md index 3e3e4df..d400c82 100644 --- a/api-docs/docs/enums/ELEMENT_TYPES.md +++ b/api-docs/docs/enums/ELEMENT_TYPES.md @@ -1,4 +1,4 @@ -# Enumeration: ELEMENT\_TYPES +# Enumeration: ELEMENT_TYPES ## Table of contents @@ -17,7 +17,7 @@ [index.d.ts:41](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L41) -___ +--- ### VALUE diff --git a/api-docs/docs/enums/GROUP_BALANCERS.md b/api-docs/docs/enums/GROUP_BALANCERS.md index 29d2d37..e770861 100644 --- a/api-docs/docs/enums/GROUP_BALANCERS.md +++ b/api-docs/docs/enums/GROUP_BALANCERS.md @@ -1,38 +1,38 @@ -# Enumeration: GROUP\_BALANCERS +# Enumeration: GROUP_BALANCERS ## Table of contents ### Enumeration Members -- [GROUP\_BALANCER\_RACK\_AFFINITY](GROUP_BALANCERS.md#group_balancer_rack_affinity) -- [GROUP\_BALANCER\_RANGE](GROUP_BALANCERS.md#group_balancer_range) -- [GROUP\_BALANCER\_ROUND\_ROBIN](GROUP_BALANCERS.md#group_balancer_round_robin) +- [GROUP_BALANCER_RACK_AFFINITY](GROUP_BALANCERS.md#group_balancer_rack_affinity) +- [GROUP_BALANCER_RANGE](GROUP_BALANCERS.md#group_balancer_range) +- [GROUP_BALANCER_ROUND_ROBIN](GROUP_BALANCERS.md#group_balancer_round_robin) ## Enumeration Members -### GROUP\_BALANCER\_RACK\_AFFINITY +### GROUP_BALANCER_RACK_AFFINITY -• **GROUP\_BALANCER\_RACK\_AFFINITY** +• **GROUP_BALANCER_RACK_AFFINITY** #### Defined in [index.d.ts:71](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L71) -___ +--- -### GROUP\_BALANCER\_RANGE +### GROUP_BALANCER_RANGE -• **GROUP\_BALANCER\_RANGE** +• **GROUP_BALANCER_RANGE** #### Defined in [index.d.ts:69](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L69) -___ +--- -### GROUP\_BALANCER\_ROUND\_ROBIN +### GROUP_BALANCER_ROUND_ROBIN -• **GROUP\_BALANCER\_ROUND\_ROBIN** +• **GROUP_BALANCER_ROUND_ROBIN** #### Defined in diff --git a/api-docs/docs/enums/ISOLATION_LEVEL.md b/api-docs/docs/enums/ISOLATION_LEVEL.md index 1e93858..5776e0d 100644 --- a/api-docs/docs/enums/ISOLATION_LEVEL.md +++ b/api-docs/docs/enums/ISOLATION_LEVEL.md @@ -1,27 +1,27 @@ -# Enumeration: ISOLATION\_LEVEL +# Enumeration: ISOLATION_LEVEL ## Table of contents ### Enumeration Members -- [ISOLATION\_LEVEL\_READ\_COMMITTED](ISOLATION_LEVEL.md#isolation_level_read_committed) -- 
[ISOLATION\_LEVEL\_READ\_UNCOMMITTED](ISOLATION_LEVEL.md#isolation_level_read_uncommitted) +- [ISOLATION_LEVEL_READ_COMMITTED](ISOLATION_LEVEL.md#isolation_level_read_committed) +- [ISOLATION_LEVEL_READ_UNCOMMITTED](ISOLATION_LEVEL.md#isolation_level_read_uncommitted) ## Enumeration Members -### ISOLATION\_LEVEL\_READ\_COMMITTED +### ISOLATION_LEVEL_READ_COMMITTED -• **ISOLATION\_LEVEL\_READ\_COMMITTED** +• **ISOLATION_LEVEL_READ_COMMITTED** #### Defined in [index.d.ts:48](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L48) -___ +--- -### ISOLATION\_LEVEL\_READ\_UNCOMMITTED +### ISOLATION_LEVEL_READ_UNCOMMITTED -• **ISOLATION\_LEVEL\_READ\_UNCOMMITTED** +• **ISOLATION_LEVEL_READ_UNCOMMITTED** #### Defined in diff --git a/api-docs/docs/enums/SASL_MECHANISMS.md b/api-docs/docs/enums/SASL_MECHANISMS.md index 048a4a2..4a3756e 100644 --- a/api-docs/docs/enums/SASL_MECHANISMS.md +++ b/api-docs/docs/enums/SASL_MECHANISMS.md @@ -1,14 +1,14 @@ -# Enumeration: SASL\_MECHANISMS +# Enumeration: SASL_MECHANISMS ## Table of contents ### Enumeration Members - [NONE](SASL_MECHANISMS.md#none) -- [SASL\_PLAIN](SASL_MECHANISMS.md#sasl_plain) -- [SASL\_SCRAM\_SHA256](SASL_MECHANISMS.md#sasl_scram_sha256) -- [SASL\_SCRAM\_SHA512](SASL_MECHANISMS.md#sasl_scram_sha512) -- [SASL\_SSL](SASL_MECHANISMS.md#sasl_ssl) +- [SASL_PLAIN](SASL_MECHANISMS.md#sasl_plain) +- [SASL_SCRAM_SHA256](SASL_MECHANISMS.md#sasl_scram_sha256) +- [SASL_SCRAM_SHA512](SASL_MECHANISMS.md#sasl_scram_sha512) +- [SASL_SSL](SASL_MECHANISMS.md#sasl_ssl) ## Enumeration Members @@ -20,41 +20,41 @@ [index.d.ts:24](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L24) -___ +--- -### SASL\_PLAIN +### SASL_PLAIN -• **SASL\_PLAIN** +• **SASL_PLAIN** #### Defined in [index.d.ts:25](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L25) -___ +--- -### SASL\_SCRAM\_SHA256 +### SASL_SCRAM_SHA256 -• **SASL\_SCRAM\_SHA256** +• **SASL_SCRAM_SHA256** #### Defined in [index.d.ts:26](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L26) -___ +--- -### SASL\_SCRAM\_SHA512 +### SASL_SCRAM_SHA512 -• **SASL\_SCRAM\_SHA512** +• **SASL_SCRAM_SHA512** #### Defined in [index.d.ts:27](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L27) -___ +--- -### SASL\_SSL +### SASL_SSL -• **SASL\_SSL** +• **SASL_SSL** #### Defined in diff --git a/api-docs/docs/enums/SCHEMA_TYPES.md b/api-docs/docs/enums/SCHEMA_TYPES.md index cfe773f..15fde5c 100644 --- a/api-docs/docs/enums/SCHEMA_TYPES.md +++ b/api-docs/docs/enums/SCHEMA_TYPES.md @@ -1,60 +1,60 @@ -# Enumeration: SCHEMA\_TYPES +# Enumeration: SCHEMA_TYPES ## Table of contents ### Enumeration Members -- [SCHEMA\_TYPE\_AVRO](SCHEMA_TYPES.md#schema_type_avro) -- [SCHEMA\_TYPE\_BYTES](SCHEMA_TYPES.md#schema_type_bytes) -- [SCHEMA\_TYPE\_JSON](SCHEMA_TYPES.md#schema_type_json) -- [SCHEMA\_TYPE\_PROTOBUF](SCHEMA_TYPES.md#schema_type_protobuf) -- [SCHEMA\_TYPE\_STRING](SCHEMA_TYPES.md#schema_type_string) +- [SCHEMA_TYPE_AVRO](SCHEMA_TYPES.md#schema_type_avro) +- [SCHEMA_TYPE_BYTES](SCHEMA_TYPES.md#schema_type_bytes) +- [SCHEMA_TYPE_JSON](SCHEMA_TYPES.md#schema_type_json) +- [SCHEMA_TYPE_PROTOBUF](SCHEMA_TYPES.md#schema_type_protobuf) +- [SCHEMA_TYPE_STRING](SCHEMA_TYPES.md#schema_type_string) ## Enumeration Members -### SCHEMA\_TYPE\_AVRO +### SCHEMA_TYPE_AVRO -• **SCHEMA\_TYPE\_AVRO** +• **SCHEMA_TYPE_AVRO** #### Defined in [index.d.ts:78](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L78) -___ +--- -### SCHEMA\_TYPE\_BYTES 
+### SCHEMA_TYPE_BYTES -• **SCHEMA\_TYPE\_BYTES** +• **SCHEMA_TYPE_BYTES** #### Defined in [index.d.ts:77](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L77) -___ +--- -### SCHEMA\_TYPE\_JSON +### SCHEMA_TYPE_JSON -• **SCHEMA\_TYPE\_JSON** +• **SCHEMA_TYPE_JSON** #### Defined in [index.d.ts:79](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L79) -___ +--- -### SCHEMA\_TYPE\_PROTOBUF +### SCHEMA_TYPE_PROTOBUF -• **SCHEMA\_TYPE\_PROTOBUF** +• **SCHEMA_TYPE_PROTOBUF** #### Defined in [index.d.ts:80](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L80) -___ +--- -### SCHEMA\_TYPE\_STRING +### SCHEMA_TYPE_STRING -• **SCHEMA\_TYPE\_STRING** +• **SCHEMA_TYPE_STRING** #### Defined in diff --git a/api-docs/docs/enums/SUBJECT_NAME_STRATEGY.md b/api-docs/docs/enums/SUBJECT_NAME_STRATEGY.md index 19bf970..cc9dfed 100644 --- a/api-docs/docs/enums/SUBJECT_NAME_STRATEGY.md +++ b/api-docs/docs/enums/SUBJECT_NAME_STRATEGY.md @@ -1,38 +1,38 @@ -# Enumeration: SUBJECT\_NAME\_STRATEGY +# Enumeration: SUBJECT_NAME_STRATEGY ## Table of contents ### Enumeration Members -- [RECORD\_NAME\_STRATEGY](SUBJECT_NAME_STRATEGY.md#record_name_strategy) -- [TOPIC\_NAME\_STRATEGY](SUBJECT_NAME_STRATEGY.md#topic_name_strategy) -- [TOPIC\_RECORD\_NAME\_STRATEGY](SUBJECT_NAME_STRATEGY.md#topic_record_name_strategy) +- [RECORD_NAME_STRATEGY](SUBJECT_NAME_STRATEGY.md#record_name_strategy) +- [TOPIC_NAME_STRATEGY](SUBJECT_NAME_STRATEGY.md#topic_name_strategy) +- [TOPIC_RECORD_NAME_STRATEGY](SUBJECT_NAME_STRATEGY.md#topic_record_name_strategy) ## Enumeration Members -### RECORD\_NAME\_STRATEGY +### RECORD_NAME_STRATEGY -• **RECORD\_NAME\_STRATEGY** +• **RECORD_NAME_STRATEGY** #### Defined in [index.d.ts:54](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L54) -___ +--- -### TOPIC\_NAME\_STRATEGY +### TOPIC_NAME_STRATEGY -• **TOPIC\_NAME\_STRATEGY** +• **TOPIC_NAME_STRATEGY** #### Defined in [index.d.ts:53](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L53) -___ +--- -### TOPIC\_RECORD\_NAME\_STRATEGY +### TOPIC_RECORD_NAME_STRATEGY -• **TOPIC\_RECORD\_NAME\_STRATEGY** +• **TOPIC_RECORD_NAME_STRATEGY** #### Defined in diff --git a/api-docs/docs/enums/TLS_VERSIONS.md b/api-docs/docs/enums/TLS_VERSIONS.md index 095e696..d261738 100644 --- a/api-docs/docs/enums/TLS_VERSIONS.md +++ b/api-docs/docs/enums/TLS_VERSIONS.md @@ -1,49 +1,49 @@ -# Enumeration: TLS\_VERSIONS +# Enumeration: TLS_VERSIONS ## Table of contents ### Enumeration Members -- [TLS\_1\_0](TLS_VERSIONS.md#tls_1_0) -- [TLS\_1\_1](TLS_VERSIONS.md#tls_1_1) -- [TLS\_1\_2](TLS_VERSIONS.md#tls_1_2) -- [TLS\_1\_3](TLS_VERSIONS.md#tls_1_3) +- [TLS_1_0](TLS_VERSIONS.md#tls_1_0) +- [TLS_1_1](TLS_VERSIONS.md#tls_1_1) +- [TLS_1_2](TLS_VERSIONS.md#tls_1_2) +- [TLS_1_3](TLS_VERSIONS.md#tls_1_3) ## Enumeration Members -### TLS\_1\_0 +### TLS_1_0 -• **TLS\_1\_0** +• **TLS_1_0** #### Defined in [index.d.ts:33](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L33) -___ +--- -### TLS\_1\_1 +### TLS_1_1 -• **TLS\_1\_1** +• **TLS_1_1** #### Defined in [index.d.ts:34](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L34) -___ +--- -### TLS\_1\_2 +### TLS_1_2 -• **TLS\_1\_2** +• **TLS_1_2** #### Defined in [index.d.ts:35](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L35) -___ +--- -### TLS\_1\_3 +### TLS_1_3 -• **TLS\_1\_3** +• **TLS_1_3** #### Defined in diff --git a/api-docs/docs/interfaces/BasicAuth.md 
b/api-docs/docs/interfaces/BasicAuth.md index 3b461dc..d20f9ad 100644 --- a/api-docs/docs/interfaces/BasicAuth.md +++ b/api-docs/docs/interfaces/BasicAuth.md @@ -17,7 +17,7 @@ [index.d.ts:136](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L136) -___ +--- ### username diff --git a/api-docs/docs/interfaces/ConfigEntry.md b/api-docs/docs/interfaces/ConfigEntry.md index 7511dc9..c07086b 100644 --- a/api-docs/docs/interfaces/ConfigEntry.md +++ b/api-docs/docs/interfaces/ConfigEntry.md @@ -17,7 +17,7 @@ [index.d.ts:203](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L203) -___ +--- ### configValue diff --git a/api-docs/docs/interfaces/ConnectionConfig.md b/api-docs/docs/interfaces/ConnectionConfig.md index fd3b406..f9b8c3c 100644 --- a/api-docs/docs/interfaces/ConnectionConfig.md +++ b/api-docs/docs/interfaces/ConnectionConfig.md @@ -18,7 +18,7 @@ [index.d.ts:190](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L190) -___ +--- ### sasl @@ -28,7 +28,7 @@ ___ [index.d.ts:191](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L191) -___ +--- ### tls diff --git a/api-docs/docs/interfaces/Container.md b/api-docs/docs/interfaces/Container.md index aaa83c3..249b28e 100644 --- a/api-docs/docs/interfaces/Container.md +++ b/api-docs/docs/interfaces/Container.md @@ -18,7 +18,7 @@ [index.d.ts:231](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L231) -___ +--- ### schema @@ -28,7 +28,7 @@ ___ [index.d.ts:232](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L232) -___ +--- ### schemaType diff --git a/api-docs/docs/interfaces/Message.md b/api-docs/docs/interfaces/Message.md index afbdbe2..e07d0a2 100644 --- a/api-docs/docs/interfaces/Message.md +++ b/api-docs/docs/interfaces/Message.md @@ -27,7 +27,7 @@ Message format for producing messages to a topic. 
[index.d.ts:129](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L129) -___ +--- ### highwaterMark @@ -37,7 +37,7 @@ ___ [index.d.ts:126](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L126) -___ +--- ### key @@ -47,7 +47,7 @@ ___ [index.d.ts:127](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L127) -___ +--- ### offset @@ -57,7 +57,7 @@ ___ [index.d.ts:125](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L125) -___ +--- ### partition @@ -67,7 +67,7 @@ ___ [index.d.ts:124](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L124) -___ +--- ### time @@ -77,7 +77,7 @@ ___ [index.d.ts:130](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L130) -___ +--- ### topic @@ -87,7 +87,7 @@ ___ [index.d.ts:123](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L123) -___ +--- ### value diff --git a/api-docs/docs/interfaces/ReaderConfig.md b/api-docs/docs/interfaces/ReaderConfig.md index 92646c8..c56a262 100644 --- a/api-docs/docs/interfaces/ReaderConfig.md +++ b/api-docs/docs/interfaces/ReaderConfig.md @@ -43,7 +43,7 @@ [index.d.ts:153](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L153) -___ +--- ### commitInterval @@ -53,7 +53,7 @@ ___ [index.d.ts:165](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L165) -___ +--- ### connectLogger @@ -63,7 +63,7 @@ ___ [index.d.ts:175](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L175) -___ +--- ### groupBalancers @@ -73,7 +73,7 @@ ___ [index.d.ts:163](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L163) -___ +--- ### groupID @@ -83,7 +83,7 @@ ___ [index.d.ts:154](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L154) -___ +--- ### groupTopics @@ -93,7 +93,7 @@ ___ [index.d.ts:155](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L155) -___ +--- ### heartbeatInterval @@ -103,7 +103,7 @@ ___ [index.d.ts:164](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L164) -___ +--- ### isolationLevel @@ -113,7 +113,7 @@ ___ [index.d.ts:177](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L177) -___ +--- ### joinGroupBackoff @@ -123,7 +123,7 @@ ___ [index.d.ts:170](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L170) -___ +--- ### maxAttempts @@ -133,7 +133,7 @@ ___ [index.d.ts:176](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L176) -___ +--- ### maxBytes @@ -143,7 +143,7 @@ ___ [index.d.ts:160](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L160) -___ +--- ### maxWait @@ -153,7 +153,7 @@ ___ [index.d.ts:161](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L161) -___ +--- ### minBytes @@ -163,7 +163,7 @@ ___ [index.d.ts:159](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L159) -___ +--- ### offset @@ -173,7 +173,7 @@ ___ [index.d.ts:178](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L178) -___ +--- ### partition @@ -183,7 +183,7 @@ ___ [index.d.ts:157](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L157) -___ +--- ### partitionWatchInterval @@ -193,7 +193,7 @@ ___ [index.d.ts:166](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L166) -___ +--- ### queueCapacity @@ -203,7 +203,7 @@ ___ [index.d.ts:158](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L158) -___ +--- ### readBackoffMax @@ -213,7 +213,7 @@ ___ 
[index.d.ts:174](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L174) -___ +--- ### readBackoffMin @@ -223,7 +223,7 @@ ___ [index.d.ts:173](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L173) -___ +--- ### readLagInterval @@ -233,7 +233,7 @@ ___ [index.d.ts:162](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L162) -___ +--- ### rebalanceTimeout @@ -243,7 +243,7 @@ ___ [index.d.ts:169](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L169) -___ +--- ### retentionTime @@ -253,7 +253,7 @@ ___ [index.d.ts:171](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L171) -___ +--- ### sasl @@ -263,7 +263,7 @@ ___ [index.d.ts:179](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L179) -___ +--- ### sessionTimeout @@ -273,7 +273,7 @@ ___ [index.d.ts:168](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L168) -___ +--- ### startOffset @@ -283,7 +283,7 @@ ___ [index.d.ts:172](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L172) -___ +--- ### tls @@ -293,7 +293,7 @@ ___ [index.d.ts:180](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L180) -___ +--- ### topic @@ -303,7 +303,7 @@ ___ [index.d.ts:156](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L156) -___ +--- ### watchPartitionChanges diff --git a/api-docs/docs/interfaces/ReplicaAssignment.md b/api-docs/docs/interfaces/ReplicaAssignment.md index 0b41ed1..b78da87 100644 --- a/api-docs/docs/interfaces/ReplicaAssignment.md +++ b/api-docs/docs/interfaces/ReplicaAssignment.md @@ -17,7 +17,7 @@ [index.d.ts:197](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L197) -___ +--- ### replicas diff --git a/api-docs/docs/interfaces/SASLConfig.md b/api-docs/docs/interfaces/SASLConfig.md index ac0cbc0..4c758c2 100644 --- a/api-docs/docs/interfaces/SASLConfig.md +++ b/api-docs/docs/interfaces/SASLConfig.md @@ -18,7 +18,7 @@ [index.d.ts:87](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L87) -___ +--- ### password @@ -28,7 +28,7 @@ ___ [index.d.ts:86](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L86) -___ +--- ### username diff --git a/api-docs/docs/interfaces/Schema.md b/api-docs/docs/interfaces/Schema.md index d97d805..bfa7ec0 100644 --- a/api-docs/docs/interfaces/Schema.md +++ b/api-docs/docs/interfaces/Schema.md @@ -18,7 +18,7 @@ [index.d.ts:218](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L218) -___ +--- ### schema @@ -28,7 +28,7 @@ ___ [index.d.ts:219](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L219) -___ +--- ### schemaType diff --git a/api-docs/docs/interfaces/SchemaRegistryConfig.md b/api-docs/docs/interfaces/SchemaRegistryConfig.md index ab92dc6..e984619 100644 --- a/api-docs/docs/interfaces/SchemaRegistryConfig.md +++ b/api-docs/docs/interfaces/SchemaRegistryConfig.md @@ -18,7 +18,7 @@ [index.d.ts:142](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L142) -___ +--- ### tls @@ -28,7 +28,7 @@ ___ [index.d.ts:143](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L143) -___ +--- ### url diff --git a/api-docs/docs/interfaces/SubjectNameConfig.md b/api-docs/docs/interfaces/SubjectNameConfig.md index 72ea67d..b4469ee 100644 --- a/api-docs/docs/interfaces/SubjectNameConfig.md +++ b/api-docs/docs/interfaces/SubjectNameConfig.md @@ -19,7 +19,7 @@ [index.d.ts:226](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L226) -___ 
+--- ### schema @@ -29,7 +29,7 @@ ___ [index.d.ts:224](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L224) -___ +--- ### subjectNameStrategy @@ -39,7 +39,7 @@ ___ [index.d.ts:227](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L227) -___ +--- ### topic diff --git a/api-docs/docs/interfaces/TLSConfig.md b/api-docs/docs/interfaces/TLSConfig.md index 8a067a5..fa40e46 100644 --- a/api-docs/docs/interfaces/TLSConfig.md +++ b/api-docs/docs/interfaces/TLSConfig.md @@ -21,7 +21,7 @@ [index.d.ts:95](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L95) -___ +--- ### clientKeyPem @@ -31,7 +31,7 @@ ___ [index.d.ts:96](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L96) -___ +--- ### enableTls @@ -41,7 +41,7 @@ ___ [index.d.ts:92](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L92) -___ +--- ### insecureSkipTlsVerify @@ -51,7 +51,7 @@ ___ [index.d.ts:93](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L93) -___ +--- ### minVersion @@ -61,7 +61,7 @@ ___ [index.d.ts:94](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L94) -___ +--- ### serverCertPem diff --git a/api-docs/docs/interfaces/TopicConfig.md b/api-docs/docs/interfaces/TopicConfig.md index 6608cc2..78007e9 100644 --- a/api-docs/docs/interfaces/TopicConfig.md +++ b/api-docs/docs/interfaces/TopicConfig.md @@ -20,7 +20,7 @@ [index.d.ts:213](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L213) -___ +--- ### numPartitions @@ -30,7 +30,7 @@ ___ [index.d.ts:210](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L210) -___ +--- ### replicaAssignments @@ -40,7 +40,7 @@ ___ [index.d.ts:212](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L212) -___ +--- ### replicationFactor @@ -50,7 +50,7 @@ ___ [index.d.ts:211](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L211) -___ +--- ### topic diff --git a/api-docs/docs/interfaces/WriterConfig.md b/api-docs/docs/interfaces/WriterConfig.md index 3f409ab..95a307d 100644 --- a/api-docs/docs/interfaces/WriterConfig.md +++ b/api-docs/docs/interfaces/WriterConfig.md @@ -29,7 +29,7 @@ [index.d.ts:104](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L104) -___ +--- ### balancer @@ -39,7 +39,7 @@ ___ [index.d.ts:105](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L105) -___ +--- ### batchBytes @@ -49,7 +49,7 @@ ___ [index.d.ts:108](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L108) -___ +--- ### batchSize @@ -59,7 +59,7 @@ ___ [index.d.ts:107](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L107) -___ +--- ### batchTimeout @@ -69,7 +69,7 @@ ___ [index.d.ts:109](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L109) -___ +--- ### brokers @@ -79,7 +79,7 @@ ___ [index.d.ts:102](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L102) -___ +--- ### compression @@ -89,7 +89,7 @@ ___ [index.d.ts:112](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L112) -___ +--- ### connectLogger @@ -99,7 +99,7 @@ ___ [index.d.ts:115](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L115) -___ +--- ### maxAttempts @@ -109,7 +109,7 @@ ___ [index.d.ts:106](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L106) -___ +--- ### readTimeout @@ -119,7 +119,7 @@ ___ [index.d.ts:110](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L110) -___ +--- ### sasl @@ 
-129,7 +129,7 @@ ___ [index.d.ts:113](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L113) -___ +--- ### tls @@ -139,7 +139,7 @@ ___ [index.d.ts:114](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L114) -___ +--- ### topic @@ -149,7 +149,7 @@ ___ [index.d.ts:103](https://github.com/mostafa/xk6-kafka/blob/main/api-docs/index.d.ts#L103) -___ +--- ### writeTimeout diff --git a/api-docs/index.d.ts b/api-docs/index.d.ts index 5adfc9d..5291935 100644 --- a/api-docs/index.d.ts +++ b/api-docs/index.d.ts @@ -13,106 +13,106 @@ /** Compression codecs for compressing messages when producing to a topic or reading from it. */ export enum COMPRESSION_CODECS { - CODEC_GZIP = "gzip", - CODEC_SNAPPY = "snappy", - CODEC_LZ4 = "lz4", - CODEC_ZSTD = "zstd", + CODEC_GZIP = "gzip", + CODEC_SNAPPY = "snappy", + CODEC_LZ4 = "lz4", + CODEC_ZSTD = "zstd", } /* SASL mechanisms for authenticating to Kafka. */ export enum SASL_MECHANISMS { - NONE = "none", - SASL_PLAIN = "sasl_plain", - SASL_SCRAM_SHA256 = "sasl_scram_sha256", - SASL_SCRAM_SHA512 = "sasl_scram_sha512", - SASL_SSL = "sasl_ssl", + NONE = "none", + SASL_PLAIN = "sasl_plain", + SASL_SCRAM_SHA256 = "sasl_scram_sha256", + SASL_SCRAM_SHA512 = "sasl_scram_sha512", + SASL_SSL = "sasl_ssl", } /* TLS versions for creating a secure communication channel with Kafka. */ export enum TLS_VERSIONS { - TLS_1_0 = "tlsv1.0", - TLS_1_1 = "tlsv1.1", - TLS_1_2 = "tlsv1.2", - TLS_1_3 = "tlsv1.3", + TLS_1_0 = "tlsv1.0", + TLS_1_1 = "tlsv1.1", + TLS_1_2 = "tlsv1.2", + TLS_1_3 = "tlsv1.3", } /* Element types for publishing schemas to Schema Registry. */ export enum ELEMENT_TYPES { - KEY = "KEY", - VALUE = "VALUE", + KEY = "KEY", + VALUE = "VALUE", } /* Isolation levels controls the visibility of transactional records. */ export enum ISOLATION_LEVEL { - ISOLATION_LEVEL_READ_UNCOMMITTED = "isolation_level_read_uncommitted", - ISOLATION_LEVEL_READ_COMMITTED = "isolation_level_read_committed", + ISOLATION_LEVEL_READ_UNCOMMITTED = "isolation_level_read_uncommitted", + ISOLATION_LEVEL_READ_COMMITTED = "isolation_level_read_committed", } /* Subject name strategy for storing a schema in Schema Registry. */ export enum SUBJECT_NAME_STRATEGY { - TOPIC_NAME_STRATEGY = "TopicNameStrategy", - RECORD_NAME_STRATEGY = "RecordNameStrategy", - TOPIC_RECORD_NAME_STRATEGY = "TopicRecordNameStrategy", + TOPIC_NAME_STRATEGY = "TopicNameStrategy", + RECORD_NAME_STRATEGY = "RecordNameStrategy", + TOPIC_RECORD_NAME_STRATEGY = "TopicRecordNameStrategy", } /* Balancers for distributing messages to partitions. */ export enum BALANCERS { - BALANCER_ROUND_ROBIN = "balancer_roundrobin", - BALANCER_LEAST_BYTES = "balancer_leastbytes", - BALANCER_HASH = "balancer_hash", - BALANCER_CRC32 = "balancer_crc32", - BALANCER_MURMUR2 = "balancer_murmur2", + BALANCER_ROUND_ROBIN = "balancer_roundrobin", + BALANCER_LEAST_BYTES = "balancer_leastbytes", + BALANCER_HASH = "balancer_hash", + BALANCER_CRC32 = "balancer_crc32", + BALANCER_MURMUR2 = "balancer_murmur2", } /* Consumer group balancing strategies for consuming messages. */ export enum GROUP_BALANCERS { - GROUP_BALANCER_RANGE = "group_balancer_range", - GROUP_BALANCER_ROUND_ROBIN = "group_balancer_round_robin", - GROUP_BALANCER_RACK_AFFINITY = "group_balancer_rack_affinity", + GROUP_BALANCER_RANGE = "group_balancer_range", + GROUP_BALANCER_ROUND_ROBIN = "group_balancer_round_robin", + GROUP_BALANCER_RACK_AFFINITY = "group_balancer_rack_affinity", } /* Schema types used in identifying schema and data type in serdes. 
*/ export enum SCHEMA_TYPES { - SCHEMA_TYPE_STRING = "STRING", - SCHEMA_TYPE_BYTES = "BYTES", - SCHEMA_TYPE_AVRO = "AVRO", - SCHEMA_TYPE_JSON = "JSON", - SCHEMA_TYPE_PROTOBUF = "PROTOBUF", + SCHEMA_TYPE_STRING = "STRING", + SCHEMA_TYPE_BYTES = "BYTES", + SCHEMA_TYPE_AVRO = "AVRO", + SCHEMA_TYPE_JSON = "JSON", + SCHEMA_TYPE_PROTOBUF = "PROTOBUF", } /* SASL configurations for authenticating to Kafka. */ export interface SASLConfig { - username: string; - password: string; - algorithm: SASL_MECHANISMS; + username: string; + password: string; + algorithm: SASL_MECHANISMS; } /* TLS configurations for creating a secure communication channel with Kafka. */ export interface TLSConfig { - enableTls: boolean; - insecureSkipTlsVerify: boolean; - minVersion: TLS_VERSIONS; - clientCertPem: string; - clientKeyPem: string; - serverCertPem: string; + enableTls: boolean; + insecureSkipTlsVerify: boolean; + minVersion: TLS_VERSIONS; + clientCertPem: string; + clientKeyPem: string; + serverCertPem: string; } /* Writer configurations for producing messages to a topic. */ export interface WriterConfig { - brokers: string[]; - topic: string; - autoCreateTopic: boolean; - balancer: BALANCERS; - maxAttempts: number; - batchSize: number; - batchBytes: number; - batchTimeout: number; - readTimeout: number; - writeTimeout: number; - compression: COMPRESSION_CODECS; - sasl: SASLConfig; - tls: TLSConfig; - connectLogger: boolean; + brokers: string[]; + topic: string; + autoCreateTopic: boolean; + balancer: BALANCERS; + maxAttempts: number; + batchSize: number; + batchBytes: number; + batchTimeout: number; + readTimeout: number; + writeTimeout: number; + compression: COMPRESSION_CODECS; + sasl: SASLConfig; + tls: TLSConfig; + connectLogger: boolean; } /** @@ -120,117 +120,117 @@ export interface WriterConfig { * @note: The message format will be adopted by the reader at some point. */ export interface Message { - topic: string; - partition: number; - offset: number; - highwaterMark: number; - key: Uint8Array; - value: Uint8Array; - headers: Map; - time: Date; + topic: string; + partition: number; + offset: number; + highwaterMark: number; + key: Uint8Array; + value: Uint8Array; + headers: Map; + time: Date; } /* Basic authentication for connecting to Schema Registry. */ export interface BasicAuth { - username: string; - password: string; + username: string; + password: string; } /* Schema Registry configurations for creating a possible secure communication channel with Schema Registry for storing and retrieving schemas. */ export interface SchemaRegistryConfig { - url: string; - basicAuth: BasicAuth; - tls: TLSConfig; + url: string; + basicAuth: BasicAuth; + tls: TLSConfig; } /* Configuration for producing messages to a topic. */ export interface ProduceConfig { - messages: Message[]; + messages: Message[]; } /* Configuration for creating a Reader instance. 
*/ export interface ReaderConfig { - brokers: string[]; - groupID: string; - groupTopics: string[]; - topic: string; - partition: number; - queueCapacity: number; - minBytes: number; - maxBytes: number; - maxWait: number; - readLagInterval: number; - groupBalancers: GROUP_BALANCERS[]; - heartbeatInterval: number; - commitInterval: number; - partitionWatchInterval: number; - watchPartitionChanges: boolean; - sessionTimeout: number; - rebalanceTimeout: number; - joinGroupBackoff: number; - retentionTime: number; - startOffset: number; - readBackoffMin: number; - readBackoffMax: number; - connectLogger: boolean; - maxAttempts: number; - isolationLevel: ISOLATION_LEVEL; - offset: number; - sasl: SASLConfig; - tls: TLSConfig; + brokers: string[]; + groupID: string; + groupTopics: string[]; + topic: string; + partition: number; + queueCapacity: number; + minBytes: number; + maxBytes: number; + maxWait: number; + readLagInterval: number; + groupBalancers: GROUP_BALANCERS[]; + heartbeatInterval: number; + commitInterval: number; + partitionWatchInterval: number; + watchPartitionChanges: boolean; + sessionTimeout: number; + rebalanceTimeout: number; + joinGroupBackoff: number; + retentionTime: number; + startOffset: number; + readBackoffMin: number; + readBackoffMax: number; + connectLogger: boolean; + maxAttempts: number; + isolationLevel: ISOLATION_LEVEL; + offset: number; + sasl: SASLConfig; + tls: TLSConfig; } /* Configuration for Consume method. */ export interface ConsumeConfig { - limit: number; + limit: number; } /* Configuration for creating a Connector instance for working with topics. */ export interface ConnectionConfig { - address: string; - sasl: SASLConfig; - tls: TLSConfig; + address: string; + sasl: SASLConfig; + tls: TLSConfig; } /* ReplicaAssignment among kafka brokers for this topic partitions. */ export interface ReplicaAssignment { - partition: number; - replicas: number[]; + partition: number; + replicas: number[]; } /* ConfigEntry holds topic level configuration for topic to be set. */ export interface ConfigEntry { - configName: string; - configValue: string; + configName: string; + configValue: string; } /* TopicConfig for creating a new topic. */ export interface TopicConfig { - topic: string; - numPartitions: number; - replicationFactor: number; - replicaAssignments: ReplicaAssignment[]; - configEntries: ConfigEntry[]; + topic: string; + numPartitions: number; + replicationFactor: number; + replicaAssignments: ReplicaAssignment[]; + configEntries: ConfigEntry[]; } /* Schema for Schema Registry client to help with serdes. */ export interface Schema { - data: any; - schema: string; - schemaType: SCHEMA_TYPES; + data: any; + schema: string; + schemaType: SCHEMA_TYPES; } export interface SubjectNameConfig { - schema: String; - topic: String; - element: ELEMENT_TYPES; - subjectNameStrategy: SUBJECT_NAME_STRATEGY; + schema: String; + topic: String; + element: ELEMENT_TYPES; + subjectNameStrategy: SUBJECT_NAME_STRATEGY; } export interface Container { - data: any; - schema: Schema; - schemaType: SCHEMA_TYPES; + data: any; + schema: Schema; + schemaType: SCHEMA_TYPES; } /** @@ -261,26 +261,26 @@ export interface Container { * ``` */ export class Writer { - /** - * @constructor - * Create a new Writer. - * @param {WriterConfig} writerConfig - Writer configuration. - * @returns {Writer} - Writer instance. - */ - constructor(writerConfig: WriterConfig); - /** - * @method - * Write messages to Kafka. - * @param {ProduceConfig} produceConfig - Produce configuration. 
- * @returns {void} - Nothing. - */ - produce(produceConfig: ProduceConfig): void; - /** - * @destructor - * @description Close the writer. - * @returns {void} - Nothing. - */ - close(): void; + /** + * @constructor + * Create a new Writer. + * @param {WriterConfig} writerConfig - Writer configuration. + * @returns {Writer} - Writer instance. + */ + constructor(writerConfig: WriterConfig); + /** + * @method + * Write messages to Kafka. + * @param {ProduceConfig} produceConfig - Produce configuration. + * @returns {void} - Nothing. + */ + produce(produceConfig: ProduceConfig): void; + /** + * @destructor + * @description Close the writer. + * @returns {void} - Nothing. + */ + close(): void; } /** @@ -303,26 +303,26 @@ export class Writer { * ``` */ export class Reader { - /** - * @constructor - * Create a new Reader. - * @param {ReaderConfig} readerConfig - Reader configuration. - * @returns {Reader} - Reader instance. - */ - constructor(readerConfig: ReaderConfig); - /** - * @method - * Read messages from Kafka. - * @param {ConsumeConfig} consumeConfig - Consume configuration. - * @returns {Message[]} - Messages. - */ - consume(consumeConfig: ConsumeConfig): Message[]; - /** - * @destructor - * @description Close the reader. - * @returns {void} - Nothing. - */ - close(): void; + /** + * @constructor + * Create a new Reader. + * @param {ReaderConfig} readerConfig - Reader configuration. + * @returns {Reader} - Reader instance. + */ + constructor(readerConfig: ReaderConfig); + /** + * @method + * Read messages from Kafka. + * @param {ConsumeConfig} consumeConfig - Consume configuration. + * @returns {Message[]} - Messages. + */ + consume(consumeConfig: ConsumeConfig): Message[]; + /** + * @destructor + * @description Close the reader. + * @returns {void} - Nothing. + */ + close(): void; } /** @@ -344,42 +344,41 @@ export class Reader { * ``` */ export class Connection { - /** - * @constructor - * Create a new Connection. - * @param {ConnectionConfig} connectionConfig - Connection configuration. - * @returns {Connection} - Connection instance. - */ - constructor(connectionConfig: ConnectionConfig); - /** - * @method - * Create a new topic. - * @param {TopicConfig} topicConfig - Topic configuration. - * @returns {void} - Nothing. - */ - createTopic(topicConfig: TopicConfig): void; - /** - * @method - * Delete a topic. - * @param {string} topic - Topic name. - * @returns {void} - Nothing. - */ - deleteTopic(topic: string): void; - /** - * @method - * List topics. - * @returns {string[]} - Topics. - */ - listTopics(): string[]; - /** - * @destructor - * @description Close the connection. - * @returns {void} - Nothing. - */ - close(): void; + /** + * @constructor + * Create a new Connection. + * @param {ConnectionConfig} connectionConfig - Connection configuration. + * @returns {Connection} - Connection instance. + */ + constructor(connectionConfig: ConnectionConfig); + /** + * @method + * Create a new topic. + * @param {TopicConfig} topicConfig - Topic configuration. + * @returns {void} - Nothing. + */ + createTopic(topicConfig: TopicConfig): void; + /** + * @method + * Delete a topic. + * @param {string} topic - Topic name. + * @returns {void} - Nothing. + */ + deleteTopic(topic: string): void; + /** + * @method + * List topics. + * @returns {string[]} - Topics. + */ + listTopics(): string[]; + /** + * @destructor + * @description Close the connection. + * @returns {void} - Nothing. 
+ */ + close(): void; } - /** * @class * @classdesc Schema Registry is a client for Schema Registry and handles serdes. @@ -387,7 +386,7 @@ export class Connection { * * ```javascript * // In init context - * const writer = new Writer({ + * const writer = new Writer({ * brokers: ["localhost:9092"], * topic: "my-topic", * autoCreateTopic: true, @@ -432,47 +431,47 @@ export class Connection { * }); * ``` */ - export class SchemaRegistry { - /** - * @constructor - * Create a new SchemaRegistry client. - * @param {SchemaRegistryConfig} schemaRegistryConfig - Schema Registry configuration. - * @returns {SchemaRegistry} - SchemaRegistry instance. - */ - constructor(schemaRegistryConfig: SchemaRegistryConfig); - /** - * @method - * Get a schema from Schema Registry by version and subject. - * @param {Schema} schema - Schema configuration. - * @returns {Schema} - Schema. - */ - getSchema(schema: Schema): Schema; - /** - * @method - * Create or update a schema on Schema Registry. - * @param {Schema} schema - Schema configuration. - * @returns {Schema} - Schema. - */ - createSchema(schema: Schema): Schema; - /** - * @method - * Returns the subject name for the given SubjectNameConfig. - * @param {SubjectNameConfig} subjectNameConfig - Subject name configuration. - * @returns {string} - Subject name. - */ - getSubjectName(subjectNameConfig: SubjectNameConfig): string; - /** - * @method - * Serializes the given data and schema into a byte array. - * @param {Container} container - Container including data, schema and schemaType. - * @returns {Uint8Array} - Serialized data as byte array. - */ - serialize(container: Container): Uint8Array; - /** - * @method - * Deserializes the given data and schema into its original form. - * @param {Container} container - Container including data, schema and schemaType. - * @returns {any} - Deserialized data as string, byte array or JSON object. - */ - deserialize(container: Container): any; +export class SchemaRegistry { + /** + * @constructor + * Create a new SchemaRegistry client. + * @param {SchemaRegistryConfig} schemaRegistryConfig - Schema Registry configuration. + * @returns {SchemaRegistry} - SchemaRegistry instance. + */ + constructor(schemaRegistryConfig: SchemaRegistryConfig); + /** + * @method + * Get a schema from Schema Registry by version and subject. + * @param {Schema} schema - Schema configuration. + * @returns {Schema} - Schema. + */ + getSchema(schema: Schema): Schema; + /** + * @method + * Create or update a schema on Schema Registry. + * @param {Schema} schema - Schema configuration. + * @returns {Schema} - Schema. + */ + createSchema(schema: Schema): Schema; + /** + * @method + * Returns the subject name for the given SubjectNameConfig. + * @param {SubjectNameConfig} subjectNameConfig - Subject name configuration. + * @returns {string} - Subject name. + */ + getSubjectName(subjectNameConfig: SubjectNameConfig): string; + /** + * @method + * Serializes the given data and schema into a byte array. + * @param {Container} container - Container including data, schema and schemaType. + * @returns {Uint8Array} - Serialized data as byte array. + */ + serialize(container: Container): Uint8Array; + /** + * @method + * Deserializes the given data and schema into its original form. + * @param {Container} container - Container including data, schema and schemaType. + * @returns {any} - Deserialized data as string, byte array or JSON object. 
+ */ + deserialize(container: Container): any; } diff --git a/api-docs/package.json b/api-docs/package.json index d724822..eccfb6a 100644 --- a/api-docs/package.json +++ b/api-docs/package.json @@ -1,19 +1,19 @@ { - "name": "xk6-kafka", - "version": "0.11.0", - "description": "xk6-kafka is a k6 extension to load test Apache Kafka", - "main": "index.d.ts", - "repository": "git@github.com:mostafa/xk6-kafka.git", - "author": "Mostafa Moradian ", - "license": "Apache-2.0", - "scripts": { - "generate-docs": "yarn exec typedoc index.d.ts" - }, - "devDependencies": { - "typedoc": "^0.22.17", - "typedoc-plugin-markdown": "^3.12.1" - }, - "dependencies": { - "typescript": "^4.7.3" - } -} \ No newline at end of file + "name": "xk6-kafka", + "version": "0.11.0", + "description": "xk6-kafka is a k6 extension to load test Apache Kafka", + "main": "index.d.ts", + "repository": "git@github.com:mostafa/xk6-kafka.git", + "author": "Mostafa Moradian ", + "license": "Apache-2.0", + "scripts": { + "generate-docs": "yarn exec typedoc index.d.ts" + }, + "devDependencies": { + "typedoc": "^0.22.17", + "typedoc-plugin-markdown": "^3.12.1" + }, + "dependencies": { + "typescript": "^4.7.3" + } +} diff --git a/api-docs/tsconfig.json b/api-docs/tsconfig.json index 385f8c6..9817f06 100644 --- a/api-docs/tsconfig.json +++ b/api-docs/tsconfig.json @@ -1,9 +1,9 @@ { - "compilerOptions": { - "target": "es2015", - "module": "commonjs", - "rootDir": ".", - "strict": true - }, - "exclude": ["node_modules"] -} \ No newline at end of file + "compilerOptions": { + "target": "es2015", + "module": "commonjs", + "rootDir": ".", + "strict": true + }, + "exclude": ["node_modules"] +} diff --git a/api-docs/typedoc.json b/api-docs/typedoc.json index 191e91f..c32943d 100644 --- a/api-docs/typedoc.json +++ b/api-docs/typedoc.json @@ -1,9 +1,9 @@ { - "theme": "markdown", - "hideBreadcrumbs": true, - "disableSources": false, - "entryPoints": ["index.d.ts"], - "plugin": ["typedoc-plugin-markdown"], - "readme": "none", - "gitRevision": "main" -} \ No newline at end of file + "theme": "markdown", + "hideBreadcrumbs": true, + "disableSources": false, + "entryPoints": ["index.d.ts"], + "plugin": ["typedoc-plugin-markdown"], + "readme": "none", + "gitRevision": "main" +} diff --git a/scripts/test_avro_no_schema_registry.js b/scripts/test_avro_no_schema_registry.js index 014d9c6..72bd6d6 100644 --- a/scripts/test_avro_no_schema_registry.js +++ b/scripts/test_avro_no_schema_registry.js @@ -6,76 +6,82 @@ without any associated key. 
*/ import { check } from "k6"; -import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_AVRO } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_AVRO, +} from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "xk6_kafka_avro_topic"; const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, + brokers: brokers, + topic: topic, + autoCreateTopic: true, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ topic: topic }); } const valueSchema = JSON.stringify({ - type: "record", - name: "Value", - namespace: "dev.mostafa.xk6.kafka", - fields: [ - { - name: "name", - type: "string", - }, - ], + type: "record", + name: "Value", + namespace: "dev.mostafa.xk6.kafka", + fields: [ + { + name: "name", + type: "string", + }, + ], }); export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - value: schemaRegistry.serialize({ - data: { - name: "xk6-kafka", - }, - schema: { schema: valueSchema }, - schemaType: SCHEMA_TYPE_AVRO, - }), - }, - ]; - writer.produce({ messages: messages }); - } + for (let index = 0; index < 100; index++) { + let messages = [ + { + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schema: { schema: valueSchema }, + schemaType: SCHEMA_TYPE_AVRO, + }), + }, + ]; + writer.produce({ messages: messages }); + } - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - "value is correct": (msgs) => - schemaRegistry.deserialize({ - data: msgs[0].value, - schema: { schema: valueSchema }, - schemaType: SCHEMA_TYPE_AVRO, - }).name == "xk6-kafka", - }); + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); + check(messages, { + "10 messages returned": (msgs) => msgs.length == 10, + "value is correct": (msgs) => + schemaRegistry.deserialize({ + data: msgs[0].value, + schema: { schema: valueSchema }, + schemaType: SCHEMA_TYPE_AVRO, + }).name == "xk6-kafka", + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); } diff --git a/scripts/test_avro_with_schema_registry.js b/scripts/test_avro_with_schema_registry.js index ab86e7f..a91e042 100644 --- a/scripts/test_avro_with_schema_registry.js +++ b/scripts/test_avro_with_schema_registry.js @@ -5,38 +5,38 @@ tests Kafka with a 100 Avro messages per iteration. 
import { check } from "k6"; import { - Writer, - Reader, - Connection, - SchemaRegistry, - KEY, - VALUE, - TOPIC_NAME_STRATEGY, - RECORD_NAME_STRATEGY, - SCHEMA_TYPE_AVRO, + Writer, + Reader, + Connection, + SchemaRegistry, + KEY, + VALUE, + TOPIC_NAME_STRATEGY, + RECORD_NAME_STRATEGY, + SCHEMA_TYPE_AVRO, } from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "com.example.person"; const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, + brokers: brokers, + topic: topic, + autoCreateTopic: true, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry({ - url: "http://localhost:8081", + url: "http://localhost:8081", }); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ topic: topic }); } const keySchema = `{ @@ -68,90 +68,90 @@ const valueSchema = `{ }`; const keySubjectName = schemaRegistry.getSubjectName({ - topic: topic, - element: KEY, - subjectNameStrategy: TOPIC_NAME_STRATEGY, - schema: keySchema, + topic: topic, + element: KEY, + subjectNameStrategy: TOPIC_NAME_STRATEGY, + schema: keySchema, }); const valueSubjectName = schemaRegistry.getSubjectName({ - topic: topic, - element: VALUE, - subjectNameStrategy: RECORD_NAME_STRATEGY, - schema: valueSchema, + topic: topic, + element: VALUE, + subjectNameStrategy: RECORD_NAME_STRATEGY, + schema: valueSchema, }); const keySchemaObject = schemaRegistry.createSchema({ - subject: keySubjectName, - schema: keySchema, - schemaType: SCHEMA_TYPE_AVRO, + subject: keySubjectName, + schema: keySchema, + schemaType: SCHEMA_TYPE_AVRO, }); const valueSchemaObject = schemaRegistry.createSchema({ - subject: valueSubjectName, - schema: valueSchema, - schemaType: SCHEMA_TYPE_AVRO, + subject: valueSubjectName, + schema: valueSchema, + schemaType: SCHEMA_TYPE_AVRO, }); export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - key: schemaRegistry.serialize({ - data: { - ssn: "ssn-" + index, - }, - schema: keySchemaObject, - schemaType: SCHEMA_TYPE_AVRO, - }), - value: schemaRegistry.serialize({ - data: { - firstName: "firstName-" + index, - lastName: "lastName-" + index, - }, - schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_AVRO, - }), - }, - ]; - writer.produce({ messages: messages }); - } + for (let index = 0; index < 100; index++) { + let messages = [ + { + key: schemaRegistry.serialize({ + data: { + ssn: "ssn-" + index, + }, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }), + value: schemaRegistry.serialize({ + data: { + firstName: "firstName-" + index, + lastName: "lastName-" + index, + }, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }), + }, + ]; + writer.produce({ messages: messages }); + } - let messages = reader.consume({ limit: 20 }); - check(messages, { - "20 message returned": (msgs) => msgs.length == 20, - "key starts with 'ssn-' string": (msgs) => - schemaRegistry - .deserialize({ - data: msgs[0].key, - schema: keySchemaObject, - schemaType: SCHEMA_TYPE_AVRO, - }) - .ssn.startsWith("ssn-"), - "value contains 'firstName-' and 'lastName-' strings": (msgs) => - schemaRegistry - .deserialize({ - data: msgs[0].value, - schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_AVRO, - }) - .firstName.startsWith("firstName-") && - schemaRegistry - .deserialize({ - data: msgs[0].value, - 
schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_AVRO, - }) - .lastName.startsWith("lastName-"), - }); + let messages = reader.consume({ limit: 20 }); + check(messages, { + "20 message returned": (msgs) => msgs.length == 20, + "key starts with 'ssn-' string": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].key, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .ssn.startsWith("ssn-"), + "value contains 'firstName-' and 'lastName-' strings": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .firstName.startsWith("firstName-") && + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_AVRO, + }) + .lastName.startsWith("lastName-"), + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); } diff --git a/scripts/test_bytes.js b/scripts/test_bytes.js index 3f0fe38..44dc108 100644 --- a/scripts/test_bytes.js +++ b/scripts/test_bytes.js @@ -6,87 +6,96 @@ tests Kafka with a 200 byte array messages per iteration. */ import { check } from "k6"; -import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_BYTES } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_BYTES, +} from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "xk6_kafka_byte_array_topic"; const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, + brokers: brokers, + topic: topic, + autoCreateTopic: true, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ topic: topic }); } const payload = "byte array payload"; export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - // The data type of the key is a string - key: schemaRegistry.serialize({ - data: Array.from("test-id-abc-" + index, (x) => x.charCodeAt(0)), - schemaType: SCHEMA_TYPE_BYTES, - }), - // The data type of the value is a byte array - value: schemaRegistry.serialize({ - data: Array.from(payload, (x) => x.charCodeAt(0)), - schemaType: SCHEMA_TYPE_BYTES, - }), - }, - { - key: schemaRegistry.serialize({ - data: Array.from("test-id-def-" + index, (x) => x.charCodeAt(0)), - schemaType: SCHEMA_TYPE_BYTES, - }), - value: schemaRegistry.serialize({ - data: Array.from(payload, (x) => x.charCodeAt(0)), - schemaType: SCHEMA_TYPE_BYTES, - }), - }, - ]; + for (let index = 0; index < 100; index++) { + let messages = [ + { + // The data type of the key is a string + key: schemaRegistry.serialize({ + data: Array.from("test-id-abc-" + index, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), + // The data type of the value is a byte array + value: schemaRegistry.serialize({ + data: Array.from(payload, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), + }, + { + key: schemaRegistry.serialize({ + data: Array.from("test-id-def-" + index, (x) => x.charCodeAt(0)), + schemaType: 
SCHEMA_TYPE_BYTES, + }), + value: schemaRegistry.serialize({ + data: Array.from(payload, (x) => x.charCodeAt(0)), + schemaType: SCHEMA_TYPE_BYTES, + }), + }, + ]; - writer.produce({ - messages: messages, - }); - } - - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - "key starts with 'test-id-' string": (msgs) => - String.fromCharCode( - ...schemaRegistry.deserialize({ data: msgs[0].key, schemaType: SCHEMA_TYPE_BYTES }) - ).startsWith("test-id-"), - "value is correct": (msgs) => - String.fromCharCode( - ...schemaRegistry.deserialize({ - data: msgs[0].value, - schemaType: SCHEMA_TYPE_BYTES, - }) - ) == payload, + writer.produce({ + messages: messages, }); + } + + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); + check(messages, { + "10 messages returned": (msgs) => msgs.length == 10, + "key starts with 'test-id-' string": (msgs) => + String.fromCharCode( + ...schemaRegistry.deserialize({ + data: msgs[0].key, + schemaType: SCHEMA_TYPE_BYTES, + }) + ).startsWith("test-id-"), + "value is correct": (msgs) => + String.fromCharCode( + ...schemaRegistry.deserialize({ + data: msgs[0].value, + schemaType: SCHEMA_TYPE_BYTES, + }) + ) == payload, + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); } diff --git a/scripts/test_json.js b/scripts/test_json.js index 3ceae52..dc710df 100644 --- a/scripts/test_json.js +++ b/scripts/test_json.js @@ -8,12 +8,12 @@ tests Kafka with a 200 JSON messages per iteration. 
import { check } from "k6"; // import * as kafka from "k6/x/kafka"; import { - Writer, - Reader, - Connection, - SchemaRegistry, - CODEC_SNAPPY, - SCHEMA_TYPE_JSON, + Writer, + Reader, + Connection, + SchemaRegistry, + CODEC_SNAPPY, + SCHEMA_TYPE_JSON, } from "k6/x/kafka"; // import kafka extension // Prints module-level constants @@ -25,127 +25,135 @@ const topic = "xk6_kafka_json_topic"; // const writer = new kafka.Writer(...); // const reader = new kafka.Reader(...); const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, - compression: CODEC_SNAPPY, + brokers: brokers, + topic: topic, + autoCreateTopic: true, + compression: CODEC_SNAPPY, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ - topic: topic, - configEntries: [{ - configName: "compression.type", - configValue: CODEC_SNAPPY, - }, ], - }); + connection.createTopic({ + topic: topic, + configEntries: [ + { + configName: "compression.type", + configValue: CODEC_SNAPPY, + }, + ], + }); } export const options = { - thresholds: { - // Base thresholds to see if the writer or reader is working - "kafka_writer_error_count": ["count == 0"], - "kafka_reader_error_count": ["count == 0"], - }, + thresholds: { + // Base thresholds to see if the writer or reader is working + kafka_writer_error_count: ["count == 0"], + kafka_reader_error_count: ["count == 0"], + }, }; -export default function() { - for (let index = 0; index < 100; index++) { - let messages = [{ - // The data type of the key is JSON - key: schemaRegistry.serialize({ - data: { - correlationId: "test-id-abc-" + index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - // The data type of the value is JSON - value: schemaRegistry.serialize({ - data: { - name: "xk6-kafka", - version: "0.9.0", - author: "Mostafa Moradian", - description: "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - headers: { - mykey: "myvalue", - }, - offset: index, - partition: 0, - time: new Date(), // Will be converted to timestamp automatically - }, - { - key: schemaRegistry.serialize({ - data: { - correlationId: "test-id-def-" + index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - value: schemaRegistry.serialize({ - data: { - name: "xk6-kafka", - version: "0.9.0", - author: "Mostafa Moradian", - description: "k6 extension to load test Apache Kafka with support for Avro messages", - index: index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - headers: { - mykey: "myvalue", - }, - }, - ]; +export default function () { + for (let index = 0; index < 100; index++) { + let messages = [ + { + // The data type of the key is JSON + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-abc-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + // The data type of the value is JSON + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + version: "0.9.0", + author: "Mostafa Moradian", + description: + "k6 extension to load test Apache Kafka with support for Avro messages", + index: index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + headers: { + mykey: "myvalue", + }, + offset: index, + partition: 0, + time: new Date(), // Will be converted to timestamp automatically + }, + { + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-def-" + 
index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + version: "0.9.0", + author: "Mostafa Moradian", + description: + "k6 extension to load test Apache Kafka with support for Avro messages", + index: index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + headers: { + mykey: "myvalue", + }, + }, + ]; - writer.produce({ messages: messages }); - } + writer.produce({ messages: messages }); + } - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages are received": (messages) => messages.length == 10, - }); + check(messages, { + "10 messages are received": (messages) => messages.length == 10, + }); - check(messages[0], { - "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, - "Key contains key/value and is JSON": (msg) => - schemaRegistry - .deserialize({ data: msg.key, schemaType: SCHEMA_TYPE_JSON }) - .correlationId.startsWith("test-id-"), - "Value contains key/value and is JSON": (msg) => - typeof schemaRegistry.deserialize({ - data: msg.value, - schemaType: SCHEMA_TYPE_JSON, - }) == "object" && - schemaRegistry.deserialize({ data: msg.value, schemaType: SCHEMA_TYPE_JSON }).name == - "xk6-kafka", - "Header equals {'mykey': 'myvalue'}": (msg) => - "mykey" in msg.headers && String.fromCharCode(...msg.headers["mykey"]) == "myvalue", - "Time is past": (msg) => new Date(msg["time"]) < new Date(), - "Partition is zero": (msg) => msg["partition"] == 0, - "Offset is gte zero": (msg) => msg["offset"] >= 0, - "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, - }); + check(messages[0], { + "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, + "Key contains key/value and is JSON": (msg) => + schemaRegistry + .deserialize({ data: msg.key, schemaType: SCHEMA_TYPE_JSON }) + .correlationId.startsWith("test-id-"), + "Value contains key/value and is JSON": (msg) => + typeof schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_JSON, + }) == "object" && + schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_JSON, + }).name == "xk6-kafka", + "Header equals {'mykey': 'myvalue'}": (msg) => + "mykey" in msg.headers && + String.fromCharCode(...msg.headers["mykey"]) == "myvalue", + "Time is past": (msg) => new Date(msg["time"]) < new Date(), + "Partition is zero": (msg) => msg["partition"] == 0, + "Offset is gte zero": (msg) => msg["offset"] >= 0, + "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} \ No newline at end of file + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); +} diff --git a/scripts/test_jsonschema_with_schema_registry.js b/scripts/test_jsonschema_with_schema_registry.js index 755bd7c..2dada96 100644 --- a/scripts/test_jsonschema_with_schema_registry.js +++ b/scripts/test_jsonschema_with_schema_registry.js @@ -5,154 +5,154 @@ tests Kafka with a 100 Avro messages per iteration. 
import { check } from "k6"; import { - Writer, - Reader, - Connection, - SchemaRegistry, - KEY, - VALUE, - SCHEMA_TYPE_JSON, - TOPIC_NAME_STRATEGY, + Writer, + Reader, + Connection, + SchemaRegistry, + KEY, + VALUE, + SCHEMA_TYPE_JSON, + TOPIC_NAME_STRATEGY, } from "k6/x/kafka"; // import kafka extension const brokers = ["localhost:9092"]; const topic = "xk6_jsonschema_test"; const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, + brokers: brokers, + topic: topic, + autoCreateTopic: true, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry({ - url: "http://localhost:8081", + url: "http://localhost:8081", }); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ topic: topic }); } const keySchema = JSON.stringify({ - title: "Key", - type: "object", - properties: { - key: { - type: "string", - description: "A key.", - }, + title: "Key", + type: "object", + properties: { + key: { + type: "string", + description: "A key.", }, + }, }); const valueSchema = JSON.stringify({ - title: "Value", - type: "object", - properties: { - firstName: { - type: "string", - description: "First name.", - }, - lastName: { - type: "string", - description: "Last name.", - }, + title: "Value", + type: "object", + properties: { + firstName: { + type: "string", + description: "First name.", }, + lastName: { + type: "string", + description: "Last name.", + }, + }, }); const keySubjectName = schemaRegistry.getSubjectName({ - topic: topic, - element: KEY, - subjectNameStrategy: TOPIC_NAME_STRATEGY, - schema: keySchema, + topic: topic, + element: KEY, + subjectNameStrategy: TOPIC_NAME_STRATEGY, + schema: keySchema, }); const valueSubjectName = schemaRegistry.getSubjectName({ - topic: topic, - element: VALUE, - subjectNameStrategy: TOPIC_NAME_STRATEGY, - schema: valueSchema, + topic: topic, + element: VALUE, + subjectNameStrategy: TOPIC_NAME_STRATEGY, + schema: valueSchema, }); const keySchemaObject = schemaRegistry.createSchema({ - subject: keySubjectName, - schema: keySchema, - schemaType: SCHEMA_TYPE_JSON, + subject: keySubjectName, + schema: keySchema, + schemaType: SCHEMA_TYPE_JSON, }); const valueSchemaObject = schemaRegistry.createSchema({ - subject: valueSubjectName, - schema: valueSchema, - schemaType: SCHEMA_TYPE_JSON, + subject: valueSubjectName, + schema: valueSchema, + schemaType: SCHEMA_TYPE_JSON, }); export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - key: schemaRegistry.serialize({ - data: { - key: "key-" + index, - }, - schema: keySchemaObject, - schemaType: SCHEMA_TYPE_JSON, - }), - value: schemaRegistry.serialize({ - data: { - firstName: "firstName-" + index, - lastName: "lastName-" + index, - }, - schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_JSON, - }), - }, - ]; - writer.produce({ messages: messages }); - } + for (let index = 0; index < 100; index++) { + let messages = [ + { + key: schemaRegistry.serialize({ + data: { + key: "key-" + index, + }, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }), + value: schemaRegistry.serialize({ + data: { + firstName: "firstName-" + index, + lastName: "lastName-" + index, + }, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }), + }, + ]; + writer.produce({ messages: messages }); + } - let messages = reader.consume({ limit: 20 }); - 
check(messages, { - "20 message returned": (msgs) => msgs.length == 20, - }); + let messages = reader.consume({ limit: 20 }); + check(messages, { + "20 message returned": (msgs) => msgs.length == 20, + }); - check(messages, { - "20 message returned": (msgs) => msgs.length == 20, - "key starts with 'key-' string": (msgs) => - schemaRegistry - .deserialize({ - data: msgs[0].key, - schema: keySchemaObject, - schemaType: SCHEMA_TYPE_JSON, - }) - .key.startsWith("key-"), - "value contains 'firstName-' and 'lastName-' strings": (msgs) => - schemaRegistry - .deserialize({ - data: msgs[0].value, - schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_JSON, - }) - .firstName.startsWith("firstName-") && - schemaRegistry - .deserialize({ - data: msgs[0].value, - schema: valueSchemaObject, - schemaType: SCHEMA_TYPE_JSON, - }) - .lastName.startsWith("lastName-"), - }); + check(messages, { + "20 message returned": (msgs) => msgs.length == 20, + "key starts with 'key-' string": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].key, + schema: keySchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .key.startsWith("key-"), + "value contains 'firstName-' and 'lastName-' strings": (msgs) => + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .firstName.startsWith("firstName-") && + schemaRegistry + .deserialize({ + data: msgs[0].value, + schema: valueSchemaObject, + schemaType: SCHEMA_TYPE_JSON, + }) + .lastName.startsWith("lastName-"), + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); } diff --git a/scripts/test_sasl_auth.js b/scripts/test_sasl_auth.js index fe62e72..e8fefde 100644 --- a/scripts/test_sasl_auth.js +++ b/scripts/test_sasl_auth.js @@ -8,26 +8,26 @@ also uses SASL authentication. import { check } from "k6"; import { - Writer, - Reader, - Connection, - SchemaRegistry, - SCHEMA_TYPE_JSON, - SASL_PLAIN, - TLS_1_2, + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_JSON, + SASL_PLAIN, + TLS_1_2, } from "k6/x/kafka"; // import kafka extension export const options = { - // This is used for testing purposes. For real-world use, you should use your own options: - // https://k6.io/docs/using-k6/k6-options/ - scenarios: { - sasl_auth: { - executor: "constant-vus", - vus: 1, - duration: "10s", - gracefulStop: "1s", - }, + // This is used for testing purposes. 
For real-world use, you should use your own options: + // https://k6.io/docs/using-k6/k6-options/ + scenarios: { + sasl_auth: { + executor: "constant-vus", + vus: 1, + duration: "10s", + gracefulStop: "1s", }, + }, }; const brokers = ["localhost:9092"]; @@ -35,34 +35,34 @@ const topic = "xk6_kafka_json_topic"; // SASL config is optional const saslConfig = { - username: "client", - password: "client-secret", - // Possible values for the algorithm is: - // NONE (default) - // SASL_PLAIN - // SASL_SCRAM_SHA256 - // SASL_SCRAM_SHA512 - // SASL_SSL (must enable TLS) - algorithm: SASL_PLAIN, + username: "client", + password: "client-secret", + // Possible values for the algorithm is: + // NONE (default) + // SASL_PLAIN + // SASL_SCRAM_SHA256 + // SASL_SCRAM_SHA512 + // SASL_SSL (must enable TLS) + algorithm: SASL_PLAIN, }; // TLS config is optional const tlsConfig = { - // Enable/disable TLS (default: false) - enableTls: false, - // Skip TLS verification if the certificate is invalid or self-signed (default: false) - insecureSkipTlsVerify: false, - // Possible values: - // TLS_1_0 - // TLS_1_1 - // TLS_1_2 (default) - // TLS_1_3 - minVersion: TLS_1_2, + // Enable/disable TLS (default: false) + enableTls: false, + // Skip TLS verification if the certificate is invalid or self-signed (default: false) + insecureSkipTlsVerify: false, + // Possible values: + // TLS_1_0 + // TLS_1_1 + // TLS_1_2 (default) + // TLS_1_3 + minVersion: TLS_1_2, - // Only needed if you have a custom or self-signed certificate and keys - // clientCertPem: "/path/to/your/client.pem", - // clientKeyPem: "/path/to/your/client-key.pem", - // serverCaPem: "/path/to/your/ca.pem", + // Only needed if you have a custom or self-signed certificate and keys + // clientCertPem: "/path/to/your/client.pem", + // clientKeyPem: "/path/to/your/client-key.pem", + // serverCaPem: "/path/to/your/ca.pem", }; const offset = 0; @@ -72,91 +72,96 @@ const numPartitions = 1; const replicationFactor = 1; const writer = new Writer({ - brokers: brokers, - topic: topic, - sasl: saslConfig, - tls: tlsConfig, + brokers: brokers, + topic: topic, + sasl: saslConfig, + tls: tlsConfig, }); const reader = new Reader({ - brokers: brokers, - topic: topic, - partition: partition, - offset: offset, - sasl: saslConfig, - tls: tlsConfig, + brokers: brokers, + topic: topic, + partition: partition, + offset: offset, + sasl: saslConfig, + tls: tlsConfig, }); const connection = new Connection({ - address: brokers[0], - sasl: saslConfig, - tls: tlsConfig, + address: brokers[0], + sasl: saslConfig, + tls: tlsConfig, }); const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ - topic: topic, - numPartitions: numPartitions, - replicationFactor: replicationFactor, - }); - console.log("Existing topics: ", connection.listTopics(saslConfig, tlsConfig)); + connection.createTopic({ + topic: topic, + numPartitions: numPartitions, + replicationFactor: replicationFactor, + }); + console.log( + "Existing topics: ", + connection.listTopics(saslConfig, tlsConfig) + ); } export default function () { - for (let index = 0; index < 100; index++) { - let messages = [ - { - key: schemaRegistry.serialize({ - data: { - correlationId: "test-id-abc-" + index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - value: schemaRegistry.serialize({ - data: { - name: "xk6-kafka", - }, - schemaType: SCHEMA_TYPE_JSON, - }), - }, - { - key: schemaRegistry.serialize({ - data: { - correlationId: "test-id-def-" + index, - }, - schemaType: SCHEMA_TYPE_JSON, - }), - value: 
schemaRegistry.serialize({ - data: { - name: "xk6-kafka", - }, - schemaType: SCHEMA_TYPE_JSON, - }), - }, - ]; + for (let index = 0; index < 100; index++) { + let messages = [ + { + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-abc-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schemaType: SCHEMA_TYPE_JSON, + }), + }, + { + key: schemaRegistry.serialize({ + data: { + correlationId: "test-id-def-" + index, + }, + schemaType: SCHEMA_TYPE_JSON, + }), + value: schemaRegistry.serialize({ + data: { + name: "xk6-kafka", + }, + schemaType: SCHEMA_TYPE_JSON, + }), + }, + ]; - writer.produce({ messages: messages }); - } + writer.produce({ messages: messages }); + } - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages returned": (msgs) => msgs.length == 10, - "key is correct": (msgs) => - schemaRegistry - .deserialize({ data: msgs[0].key, schemaType: SCHEMA_TYPE_JSON }) - .correlationId.startsWith("test-id-"), - "value is correct": (msgs) => - schemaRegistry.deserialize({ data: msgs[0].value, schemaType: SCHEMA_TYPE_JSON }) - .name == "xk6-kafka", - }); + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); + check(messages, { + "10 messages returned": (msgs) => msgs.length == 10, + "key is correct": (msgs) => + schemaRegistry + .deserialize({ data: msgs[0].key, schemaType: SCHEMA_TYPE_JSON }) + .correlationId.startsWith("test-id-"), + "value is correct": (msgs) => + schemaRegistry.deserialize({ + data: msgs[0].value, + schemaType: SCHEMA_TYPE_JSON, + }).name == "xk6-kafka", + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); } diff --git a/scripts/test_string.js b/scripts/test_string.js index ef8553e..c5ef7c3 100644 --- a/scripts/test_string.js +++ b/scripts/test_string.js @@ -7,7 +7,13 @@ tests Kafka with a 200 JSON messages per iteration. 
import { check } from "k6"; // import * as kafka from "k6/x/kafka"; -import { Writer, Reader, Connection, SchemaRegistry, SCHEMA_TYPE_STRING } from "k6/x/kafka"; // import kafka extension +import { + Writer, + Reader, + Connection, + SchemaRegistry, + SCHEMA_TYPE_STRING, +} from "k6/x/kafka"; // import kafka extension // Prints module-level constants // console.log(kafka); @@ -16,101 +22,107 @@ const brokers = ["localhost:9092"]; const topic = "xk6_kafka_json_topic"; const writer = new Writer({ - brokers: brokers, - topic: topic, - autoCreateTopic: true, + brokers: brokers, + topic: topic, + autoCreateTopic: true, }); const reader = new Reader({ - brokers: brokers, - topic: topic, + brokers: brokers, + topic: topic, }); const connection = new Connection({ - address: brokers[0], + address: brokers[0], }); const schemaRegistry = new SchemaRegistry(); if (__VU == 0) { - connection.createTopic({ topic: topic }); + connection.createTopic({ topic: topic }); } export const options = { - thresholds: { - // Base thresholds to see if the writer or reader is working - "kafka_writer_error_count": ["count == 0"], - "kafka_reader_error_count": ["count == 0"], - }, + thresholds: { + // Base thresholds to see if the writer or reader is working + kafka_writer_error_count: ["count == 0"], + kafka_reader_error_count: ["count == 0"], + }, }; -export default function() { - for (let index = 0; index < 100; index++) { - let messages = [{ - key: schemaRegistry.serialize({ - data: "test-key-string", - schemaType: SCHEMA_TYPE_STRING, - }), - value: schemaRegistry.serialize({ - data: "test-value-string", - schemaType: SCHEMA_TYPE_STRING, - }), - headers: { - mykey: "myvalue", - }, - offset: index, - partition: 0, - time: new Date(), // Will be converted to timestamp automatically - }, - { - key: schemaRegistry.serialize({ - data: "test-key-string", - schemaType: SCHEMA_TYPE_STRING, - }), - value: schemaRegistry.serialize({ - data: "test-value-string", - schemaType: SCHEMA_TYPE_STRING, - }), - headers: { - mykey: "myvalue", - }, - }, - ]; +export default function () { + for (let index = 0; index < 100; index++) { + let messages = [ + { + key: schemaRegistry.serialize({ + data: "test-key-string", + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: "test-value-string", + schemaType: SCHEMA_TYPE_STRING, + }), + headers: { + mykey: "myvalue", + }, + offset: index, + partition: 0, + time: new Date(), // Will be converted to timestamp automatically + }, + { + key: schemaRegistry.serialize({ + data: "test-key-string", + schemaType: SCHEMA_TYPE_STRING, + }), + value: schemaRegistry.serialize({ + data: "test-value-string", + schemaType: SCHEMA_TYPE_STRING, + }), + headers: { + mykey: "myvalue", + }, + }, + ]; - writer.produce({ messages: messages }); - } + writer.produce({ messages: messages }); + } - // Read 10 messages only - let messages = reader.consume({ limit: 10 }); + // Read 10 messages only + let messages = reader.consume({ limit: 10 }); - check(messages, { - "10 messages are received": (messages) => messages.length == 10, - }); + check(messages, { + "10 messages are received": (messages) => messages.length == 10, + }); - check(messages[0], { - "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, - "Key is a string and is correct": (msg) => - schemaRegistry.deserialize({ data: msg.key, schemaType: SCHEMA_TYPE_STRING }) == - "test-key-string", - "Value is a string and is correct": (msg) => - typeof schemaRegistry.deserialize({ - data: msg.value, - schemaType: 
SCHEMA_TYPE_STRING, - }) == "string" && - schemaRegistry.deserialize({ data: msg.value, schemaType: SCHEMA_TYPE_STRING }) == - "test-value-string", - "Header equals {'mykey': 'myvalue'}": (msg) => - "mykey" in msg.headers && String.fromCharCode(...msg.headers["mykey"]) == "myvalue", - "Time is past": (msg) => new Date(msg["time"]) < new Date(), - "Partition is zero": (msg) => msg["partition"] == 0, - "Offset is gte zero": (msg) => msg["offset"] >= 0, - "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, - }); + check(messages[0], { + "Topic equals to xk6_kafka_json_topic": (msg) => msg["topic"] == topic, + "Key is a string and is correct": (msg) => + schemaRegistry.deserialize({ + data: msg.key, + schemaType: SCHEMA_TYPE_STRING, + }) == "test-key-string", + "Value is a string and is correct": (msg) => + typeof schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_STRING, + }) == "string" && + schemaRegistry.deserialize({ + data: msg.value, + schemaType: SCHEMA_TYPE_STRING, + }) == "test-value-string", + "Header equals {'mykey': 'myvalue'}": (msg) => + "mykey" in msg.headers && + String.fromCharCode(...msg.headers["mykey"]) == "myvalue", + "Time is past": (msg) => new Date(msg["time"]) < new Date(), + "Partition is zero": (msg) => msg["partition"] == 0, + "Offset is gte zero": (msg) => msg["offset"] >= 0, + "High watermark is gte zero": (msg) => msg["highWaterMark"] >= 0, + }); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - writer.close(); - reader.close(); - connection.close(); -} \ No newline at end of file + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + writer.close(); + reader.close(); + connection.close(); +} diff --git a/scripts/test_topics.js b/scripts/test_topics.js index 92cc8dd..8714408 100644 --- a/scripts/test_topics.js +++ b/scripts/test_topics.js @@ -11,20 +11,20 @@ const address = "localhost:9092"; const topic = "xk6_kafka_test_topic"; const connection = new Connection({ - address: address, + address: address, }); const results = connection.listTopics(); connection.createTopic({ topic: topic }); export default function () { - results.forEach((topic) => console.log(topic)); + results.forEach((topic) => console.log(topic)); } export function teardown(data) { - if (__VU == 0) { - // Delete the topic - connection.deleteTopic(topic); - } - connection.close(); + if (__VU == 0) { + // Delete the topic + connection.deleteTopic(topic); + } + connection.close(); }