improve scrapper (#34)
* java 21 and native build

* fix config

* improve deployment
gaetancollaud authored Mar 7, 2024
1 parent de9a535 commit 67beaae
Showing 13 changed files with 100 additions and 47 deletions.
22 changes: 17 additions & 5 deletions .github/workflows/build-scraper.yaml
@@ -14,17 +14,21 @@ jobs:
     steps:
       - uses: actions/checkout@v4
 
-      - name: Set up JDK 17
+      - name: Set up JDK 21
         uses: actions/setup-java@v3
         with:
-          java-version: '17'
+          java-version: '21'
           distribution: 'temurin'
           cache: maven
 
-      - name: Build with maven
+      - name: Build and test
         working-directory: kafka-scraper/
         run: mvn --batch-mode clean package
 
+      - name: Build native
+        working-directory: kafka-scraper/
+        run: mvn --batch-mode package -DskipTests -Pnative
+
       - name: Upload artifacts
         uses: actions/upload-artifact@v4
         with:
@@ -49,10 +53,18 @@
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}
 
-      - name: Build and push
+      - name: Build and push jvm image
         uses: docker/build-push-action@v5
         with:
           context: kafka-scraper/
           file: kafka-scraper/src/main/docker/Dockerfile.jvm
           push: true
-          tags: spoud/kafka-cost-control-scraper:latest
+          tags: spoud/kafka-cost-control-scraper:latest-jvm
+
+      - name: Build and push native
+        uses: docker/build-push-action@v5
+        with:
+          context: kafka-scraper/
+          file: kafka-scraper/src/main/docker/Dockerfile.native-micro
+          push: true
+          tags: spoud/kafka-cost-control-scraper:latest-native
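For reference, the two-phase build the workflow now performs can be reproduced locally from the repository root; a sketch using the exact commands above:

```bash
# Phase 1: JVM build, runs the test suite
cd kafka-scraper
mvn --batch-mode clean package

# Phase 2: native executable; tests already ran in phase 1
mvn --batch-mode package -DskipTests -Pnative
```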
13 changes: 12 additions & 1 deletion deployment/kafka-metric-scrapper/base/scraper.yaml
@@ -17,8 +17,19 @@ spec:
     spec:
       containers:
         - name: kafka-cost-control-scraper
-          image: spoud/kafka-cost-control-scraper:latest
+          image: spoud/kafka-cost-control-scraper:native
           imagePullPolicy: Always
+          env:
+            - name: KAFKA_SECURITY_PROTOCOL
+              value: SASL_SSL
+            - name: KAFKA_SASL_MECHANISM
+              value: PLAIN
+            - name: KAFKA_SASL_JAAS_CONFIG
+              value: org.apache.kafka.common.security.plain.PlainLoginModule required username="${CLUSTER_API_KEY}" password="${CLUSTER_API_SECRET}";
+            - name: KAFKA_BASIC_AUTH_CREDENTIALS_SOURCE
+              value: USER_INFO
+            - name: KAFKA_BASIC_AUTH_USER_INFO
+              value: ${SCHEMA_REGISTRY_API_KEY}:${SCHEMA_REGISTRY_API_SECRET}
           livenessProbe:
             httpGet:
               path: /q/health/live
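A quick local smoke test of these settings is possible with the `.env-scraper` file introduced further down in this commit. This is a minimal sketch: it assumes the `${CLUSTER_API_KEY}`-style placeholders are expanded by Quarkus configuration interpolation from the container environment, and it uses the `latest-native` tag pushed by the workflow above rather than the `native` tag in this manifest.

```bash
# Hypothetical local run: supply the same credentials the Deployment
# gets from its generated Secret, via an env file.
docker run --rm -p 8080:8080 \
  --env-file deployment/kafka-metric-scrapper/dev/.env-scraper \
  -e KAFKA_SECURITY_PROTOCOL=SASL_SSL \
  -e KAFKA_SASL_MECHANISM=PLAIN \
  spoud/kafka-cost-control-scraper:latest-native

# The probe target used by the Deployment:
curl -fs http://127.0.0.1:8080/q/health/live
```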
7 changes: 7 additions & 0 deletions deployment/kafka-metric-scrapper/base/telegraf-config.toml
@@ -18,6 +18,13 @@
   urls = ${INPUT_URLS}
   username = "${INPUT_CONFLUENT_METRICS_API_KEY}"
   password = "${INPUT_CONFLUENT_METRICS_API_SECRET}"
+  [inputs.prometheus.tags]
+    source="confluent-metric-api"
+
+[[inputs.prometheus]]
+  urls = ["http://kafka-cost-control-scraper-service:8080/q/metrics"]
+  [inputs.prometheus.tags]
+    source="kafka-metric-scrapper"
 
 [[outputs.kafka]]
   brokers = ${OUTPUT_BROKERS}
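The new second `[[inputs.prometheus]]` block makes Telegraf scrape the scraper's own Quarkus metrics endpoint and tags each series with its origin. The endpoint can be checked by hand; the first URL is the in-cluster service name from the config above, the second matches the README:

```bash
# From inside the cluster (e.g. a debug pod):
curl -fs http://kafka-cost-control-scraper-service:8080/q/metrics | head

# From a locally running scraper:
curl -fs http://127.0.0.1:8080/q/metrics | head
```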
8 changes: 8 additions & 0 deletions deployment/kafka-metric-scrapper/dev/.env-scraper.sample
@@ -0,0 +1,8 @@
+KAFKA_BOOTSTRAP_SERVERS=instance.provider.confluent.cloud:9092
+CLUSTER_API_KEY=ccloud-username
+CLUSTER_API_SECRET=ccloud-password
+
+KAFKA_SCHEMA_REGISTRY_URL=https://instance.provider.confluent.cloud
+SCHEMA_REGISTRY_API_KEY=ccloud-metric-api-key
+SCHEMA_REGISTRY_API_SECRET=ccloud-metric-api-secret
+
9 changes: 6 additions & 3 deletions deployment/kafka-metric-scrapper/dev/kustomization.yaml
@@ -8,6 +8,9 @@ generatorOptions:
   disableNameSuffixHash: true
 
 secretGenerator:
-  - name: telegraf-secret
-    envs:
-      - .env
+  - name: telegraf-secret
+    envs:
+      - .env-telegraf
+  - name: kafka-cost-control-scraper-secret
+    envs:
+      - .env-scraper
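With the second generator in place, deploying the dev overlay needs both env files next to the kustomization. A plausible flow, assuming `kubectl` with built-in kustomize (the file names come from this commit; the edit step is illustrative):

```bash
cd deployment/kafka-metric-scrapper/dev

# The sample added in this commit documents the expected keys;
# copy it and fill in real Confluent Cloud credentials:
cp .env-scraper.sample .env-scraper

# .env was renamed to .env-telegraf by this commit, so both sources
# must exist before the secretGenerator can produce its Secrets:
kubectl apply -k .
```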
15 changes: 13 additions & 2 deletions kafka-scraper/README.md
@@ -2,6 +2,17 @@
 
 exposes metrics at http://127.0.0.1:8080/q/metrics
 
-
-TODO:
+## TODOs
 - use separate endpoint for registry
+
+## Build
+
+```bash
+mvn clean package -Pnative
+```
+
+## Build and push docker image
+```bash
+docker-compose build
+docker-compose push
+```
7 changes: 7 additions & 0 deletions kafka-scraper/docker-compose.yaml
@@ -0,0 +1,7 @@
+version: '3.0'
+services:
+  scraper:
+    image: spoud/kafka-cost-control-scraper:dev
+    build:
+      context: .
+      dockerfile: src/main/docker/Dockerfile.native-micro
35 changes: 21 additions & 14 deletions kafka-scraper/pom.xml
@@ -6,14 +6,16 @@
     <artifactId>kcc-kafka-scraper</artifactId>
     <version>1.0-SNAPSHOT</version>
     <properties>
-        <compiler-plugin.version>3.11.0</compiler-plugin.version>
-        <maven.compiler.release>17</maven.compiler.release>
+        <maven.compiler.release>21</maven.compiler.release>
         <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
         <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
+        <skipITs>true</skipITs>
+
         <quarkus.platform.artifact-id>quarkus-bom</quarkus.platform.artifact-id>
         <quarkus.platform.group-id>io.quarkus.platform</quarkus.platform.group-id>
-        <quarkus.platform.version>3.6.8</quarkus.platform.version>
-        <skipITs>true</skipITs>
+        <quarkus.platform.version>3.8.1</quarkus.platform.version>
+
+        <compiler-plugin.version>3.11.0</compiler-plugin.version>
         <surefire-plugin.version>3.1.2</surefire-plugin.version>
     </properties>
     <dependencyManagement>
@@ -68,6 +70,15 @@
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-container-image-jib</artifactId>
         </dependency>
+        <dependency>
+            <groupId>io.quarkus</groupId>
+            <artifactId>quarkus-smallrye-health</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.eclipse.microprofile.rest.client</groupId>
+            <artifactId>microprofile-rest-client-api</artifactId>
+        </dependency>
+
         <dependency>
             <groupId>io.quarkus</groupId>
             <artifactId>quarkus-junit5</artifactId>
@@ -84,16 +95,12 @@
             <version>3.24.2</version>
             <scope>test</scope>
         </dependency>
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <version>3.12.4</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.eclipse.microprofile.rest.client</groupId>
-            <artifactId>microprofile-rest-client-api</artifactId>
-        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>3.12.4</version>
+            <scope>test</scope>
+        </dependency>
     </dependencies>
     <build>
         <plugins>
20 changes: 0 additions & 20 deletions kafka-scraper/src/main/resources/application-ccloud.yaml

This file was deleted.

5 changes: 5 additions & 0 deletions kafka-scraper/src/main/resources/application.yaml
@@ -7,6 +7,11 @@ kafka:
   registry:
     url: http://localhost:8081
 quarkus:
+  kafka:
+    snappy:
+      enabled: true
+  native:
+    container-build: true
   rest-client:
     schema-registry-api:
       url: ${kafka.schema.registry.url}
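`container-build: true` makes Quarkus run the GraalVM native compilation inside a builder container, so the `-Pnative` builds in this commit should only require Docker locally rather than a GraalVM installation. A hypothetical local image build mirroring what docker-compose.yaml does:

```bash
cd kafka-scraper
mvn package -DskipTests -Pnative   # native compilation happens in a container

# Package the resulting binary the same way the compose file does:
docker build -f src/main/docker/Dockerfile.native-micro \
  -t spoud/kafka-cost-control-scraper:dev .
```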
@@ -12,6 +12,7 @@
 import java.time.Duration;
 import java.util.List;
 import java.util.Set;
+import java.util.function.Function;
 
 import static org.mockito.Mockito.*;
 
@@ -31,7 +32,7 @@ void setUp() {
         jobDefinition = mock(Scheduler.JobDefinition.class);
         when(scheduler.newJob(anyString())).thenReturn(jobDefinition);
         when(jobDefinition.setInterval(anyString())).thenReturn(jobDefinition);
-        when(jobDefinition.setAsyncTask(any())).thenReturn(jobDefinition);
+        when(jobDefinition.setAsyncTask(any(Function.class))).thenReturn(jobDefinition);
 
         configProperties = mock(ScraperConfigProperties.class);
         when(configProperties.srEnabled()).thenReturn(true);
@@ -20,6 +20,7 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.function.Consumer;
 
 import static org.mockito.Mockito.*;
 
@@ -39,7 +40,7 @@ void setUp() {
         jobDefinition = mock(Scheduler.JobDefinition.class);
         when(scheduler.newJob(anyString())).thenReturn(jobDefinition);
         when(jobDefinition.setInterval(anyString())).thenReturn(jobDefinition);
-        when(jobDefinition.setTask(any())).thenReturn(jobDefinition);
+        when(jobDefinition.setTask(any(Consumer.class))).thenReturn(jobDefinition);
 
         configProperties = mock(ScraperConfigProperties.class);
         when(configProperties.srEnabled()).thenReturn(true);
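A likely reason for the two matcher changes above (plain `any()` replaced by `any(Consumer.class)` and `any(Function.class)`): the Quarkus 3.6.8 to 3.8.1 bump appears to add overloads to `Scheduler.JobDefinition`'s task setters, which makes an untyped `any()` ambiguous, while passing the expected parameter type pins the intended overload. This reading is an inference from the version bump, not something the commit states.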
