Merge latest changes from main branch
parthiv11 authored Nov 8, 2024
2 parents fb4c6a9 + c589cb5 commit 8fc42ec
Showing 2,227 changed files with 309,028 additions and 91,684 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/airbyte-ci-tests.yml
@@ -69,7 +69,7 @@ jobs:
- name: Fetch last commit id from remote branch [PULL REQUESTS]
if: github.event_name == 'pull_request'
id: fetch_last_commit_id_pr
run: echo "commit_id=$(git ls-remote --heads origin ${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
run: echo "commit_id=$(git ls-remote --heads origin refs/heads/${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
- name: Fetch last commit id from remote branch [WORKFLOW DISPATCH]
if: github.event_name == 'workflow_dispatch'
id: fetch_last_commit_id_wd
2 changes: 1 addition & 1 deletion .github/workflows/cdk_connectors_tests.yml
@@ -71,7 +71,7 @@ jobs:
- name: Fetch last commit id from remote branch [PULL REQUESTS]
if: github.event_name == 'pull_request'
id: fetch_last_commit_id_pr
run: echo "commit_id=$(git ls-remote --heads origin ${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
run: echo "commit_id=$(git ls-remote --heads origin refs/heads/${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
- name: Fetch last commit id from remote branch [WORKFLOW DISPATCH]
if: github.event_name == 'workflow_dispatch'
id: fetch_last_commit_id_wd
2 changes: 0 additions & 2 deletions .github/workflows/community_ci.yml
@@ -172,7 +172,6 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "pull_request"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
@@ -237,7 +236,6 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "pull_request"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
2 changes: 1 addition & 1 deletion .github/workflows/connectors_tests.yml
@@ -95,7 +95,7 @@ jobs:
- name: Fetch last commit id from remote branch [PULL REQUESTS]
if: github.event_name == 'pull_request'
id: fetch_last_commit_id_pr
run: echo "commit_id=$(git ls-remote --heads origin ${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
run: echo "commit_id=$(git ls-remote --heads origin refs/heads/${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
- name: Fetch last commit id from remote branch [WORKFLOW DISPATCH]
if: github.event_name == 'workflow_dispatch'
id: fetch_last_commit_id_wd
25 changes: 21 additions & 4 deletions .github/workflows/connectors_up_to_date.yml
@@ -11,7 +11,7 @@ on:
inputs:
connectors-options:
description: "Options to pass to the 'airbyte-ci connectors' command group."
default: "--concurrency=10 --language=python --language=low-code --language=manifest-only"
default: '--concurrency=10 --language=python --language=low-code --language=manifest-only --metadata-query="''-rc.'' not in data.dockerImageTag" --metadata-query="''source-declarative-manifest'' not in data.dockerRepository"'
auto-merge:
description: "Whether to auto-merge the PRs created by the action."
default: "false"
@@ -25,8 +25,9 @@ jobs:
steps:
- name: Checkout Airbyte
uses: actions/checkout@v4
- name: Run airbyte-ci connectors up-to-date
id: airbyte-ci-connectors-up-to-date
- name: Run airbyte-ci connectors up-to-date [WORKFLOW]
if: github.event_name == 'workflow_dispatch'
id: airbyte-ci-connectors-up-to-date-workflow-dispatch
uses: ./.github/actions/run-airbyte-ci
with:
context: "master"
@@ -39,4 +40,20 @@
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
subcommand: "connectors ${{ github.event.inputs.connectors-options || '--concurrency=10 --language=python --language=low-code --support-level=community --support-level=certified' }} up-to-date --ignore-connector=source-declarative-manifest --create-prs ${{ github.event.inputs.auto-merge == 'false' && '' || '--auto-merge' }}"
subcommand: "connectors ${{ github.event.inputs.connectors-options }} up-to-date --create-prs ${{ github.event.inputs.auto-merge == 'false' && '' || '--auto-merge' }}"
- name: Run airbyte-ci connectors up-to-date [SCHEDULE]
if: github.event_name == 'schedule'
id: airbyte-ci-connectors-up-to-date-schedule
uses: ./.github/actions/run-airbyte-ci
with:
context: "master"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
gcs_credentials: ${{ secrets.METADATA_SERVICE_PROD_GCS_CREDENTIALS }}
github_token: ${{ secrets.GH_PAT_MAINTENANCE_OSS }}
sentry_dsn: ${{ secrets.SENTRY_AIRBYTE_CI_DSN }}
s3_build_cache_access_key_id: ${{ secrets.SELF_RUNNER_AWS_ACCESS_KEY_ID }}
s3_build_cache_secret_key: ${{ secrets.SELF_RUNNER_AWS_SECRET_ACCESS_KEY }}
subcommand: 'connectors --concurrency=10 --language=python --language=low-code --support-level=community --support-level=certified --metadata-query="\"source-declarative-manifest\" not in data.dockerRepository" --metadata-query="\"-rc.\" not in data.dockerImageTag" up-to-date --create-prs --auto-merge'
4 changes: 2 additions & 2 deletions .github/workflows/connectors_version_increment_check.yml
@@ -23,7 +23,7 @@ jobs:
name: Connectors Version Increment Check
runs-on: connector-test-large
if: github.event.pull_request.head.repo.fork != true
timeout-minutes: 12
timeout-minutes: 22
steps:
- name: Checkout Airbyte
uses: actions/checkout@v4
@@ -35,7 +35,7 @@
- name: Fetch last commit id from remote branch [PULL REQUESTS]
if: github.event_name == 'pull_request'
id: fetch_last_commit_id_pr
run: echo "commit_id=$(git ls-remote --heads origin ${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
run: echo "commit_id=$(git ls-remote --heads origin refs/heads/${{ github.head_ref }} | cut -f 1)" >> $GITHUB_OUTPUT
- name: Test connectors [PULL REQUESTS]
if: github.event_name == 'pull_request'
uses: ./.github/actions/run-airbyte-ci
2 changes: 1 addition & 1 deletion .github/workflows/live_tests.yml
@@ -116,7 +116,7 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "manual"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
11 changes: 0 additions & 11 deletions .github/workflows/publish-bulk-cdk.yml
@@ -76,17 +76,6 @@ jobs:
gradle-distribution-sha-256-sum-warning: false
arguments: --scan :airbyte-cdk:bulk:bulkCdkBuild

- name: Integration test Bulk CDK
uses: burrunan/gradle-cache-action@v1
env:
CI: true
with:
read-only: true
job-id: bulk-cdk-publish
concurrent: true
gradle-distribution-sha-256-sum-warning: false
arguments: --scan :airbyte-cdk:bulk:bulkCdkIntegrationTest

- name: Publish Poms and Jars to CloudRepo
uses: burrunan/gradle-cache-action@v1
env:
4 changes: 2 additions & 2 deletions .github/workflows/publish_connectors.yml
@@ -30,7 +30,7 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "master"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
@@ -53,7 +53,7 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "manual"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
2 changes: 1 addition & 1 deletion .github/workflows/regression_tests.yml
@@ -116,7 +116,7 @@ jobs:
uses: ./.github/actions/run-airbyte-ci
with:
context: "manual"
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_4 }}
dagger_cloud_token: ${{ secrets.DAGGER_CLOUD_TOKEN_CACHE_3 }}
docker_hub_password: ${{ secrets.DOCKER_HUB_PASSWORD }}
docker_hub_username: ${{ secrets.DOCKER_HUB_USERNAME }}
gcp_gsm_credentials: ${{ secrets.GCP_GSM_CREDENTIALS }}
4 changes: 0 additions & 4 deletions airbyte-cdk/bulk/build.gradle
@@ -61,10 +61,6 @@ allprojects {
}
}

tasks.register('bulkCdkIntegrationTest').configure {
dependsOn allprojects.collect {it.tasks.matching { it.name == 'integrationTest' }}
}

if (buildNumberFile.exists()) {
tasks.register('bulkCdkBuild').configure {
dependsOn allprojects.collect {it.tasks.named('build')}
4 changes: 4 additions & 0 deletions airbyte-cdk/bulk/core/base/build.gradle
@@ -50,3 +50,7 @@ dependencies {
testFixturesApi 'io.micronaut.test:micronaut-test-junit5:4.5.0'
testFixturesApi 'io.github.deblockt:json-diff:1.1.0'
}

test {
environment "PRESENT", "present-value"
}
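The `test { environment "PRESENT", "present-value" }` block above injects an environment variable into the test JVM. A minimal sketch of a JUnit 5 test that would observe it (the class and test names here are illustrative, not the repository's actual test):

```kotlin
import org.junit.jupiter.api.Assertions.assertEquals
import org.junit.jupiter.api.Test

// Illustrative only: verifies that the variable set in the Gradle `test` block
// is visible to code running inside the test JVM.
class EnvironmentInjectionSketch {
    @Test
    fun `PRESENT is set for the test JVM`() {
        assertEquals("present-value", System.getenv("PRESENT"))
    }
}
```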
(another changed file; path not shown in this view)
@@ -10,6 +10,6 @@ import io.airbyte.cdk.ssh.SshTunnelMethodConfiguration
interface SshTunnelConfiguration {
val realHost: String
val realPort: Int
val sshTunnel: SshTunnelMethodConfiguration
val sshTunnel: SshTunnelMethodConfiguration?
val sshConnectionOptions: SshConnectionOptions
}
(another changed file; path not shown in this view)
@@ -48,7 +48,7 @@ internal constructor(
/** Creates an open [TunnelSession]. */
fun createTunnelSession(
remote: SshdSocketAddress,
sshTunnel: SshTunnelMethodConfiguration,
sshTunnel: SshTunnelMethodConfiguration?,
connectionOptions: SshConnectionOptions,
): TunnelSession {
if (sshTunnel is SshNoTunnelMethod) {
@@ -62,7 +62,8 @@ fun createTunnelSession(
log.info { "Creating SSH client session." }
val connectFuture: ConnectFuture =
when (sshTunnel) {
SshNoTunnelMethod -> TODO("unreachable code")
SshNoTunnelMethod,
null -> TODO("unreachable code")
is SshKeyAuthTunnelMethod ->
client.connect(sshTunnel.user.trim(), sshTunnel.host.trim(), sshTunnel.port)
is SshPasswordAuthTunnelMethod ->
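For context on the two hunks above: `sshTunnel` becomes nullable and the `when` gains a `null` branch alongside `SshNoTunnelMethod`, so an absent tunnel configuration falls back to a direct connection. A minimal, self-contained Kotlin sketch of that pattern; the types below are simplified stand-ins, not the CDK's real classes in `io.airbyte.cdk.ssh`:

```kotlin
// Simplified stand-ins for the CDK's tunnel-method hierarchy (illustration only).
sealed interface TunnelMethod
data object NoTunnel : TunnelMethod
data class KeyAuthTunnel(val user: String, val host: String, val port: Int) : TunnelMethod

// With a nullable tunnel configuration, "not configured" (null) and an explicit
// "no tunnel" selection are handled by the same branch: connect directly.
fun describeConnection(tunnel: TunnelMethod?): String =
    when (tunnel) {
        null, NoTunnel -> "direct connection to the real host"
        is KeyAuthTunnel -> "SSH tunnel via ${tunnel.user}@${tunnel.host}:${tunnel.port}"
    }

fun main() {
    println(describeConnection(null))                                    // direct connection to the real host
    println(describeConnection(KeyAuthTunnel("airbyte", "bastion", 22))) // SSH tunnel via airbyte@bastion:22
}
```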
(another changed file; path not shown in this view)
@@ -0,0 +1,4 @@
airbyte:
file-transfer:
enabled: ${USE_FILE_TRANSFER:false}
staging-path: ${AIRBYTE_STAGING_DIRECTORY:/staging/files}
(another changed file; path not shown in this view)
@@ -0,0 +1,40 @@
/*
* Copyright (c) 2024 Airbyte, Inc., all rights reserved.
*/

package io.airbyte.cdk.initialization

import io.micronaut.context.annotation.Bean
import io.micronaut.context.annotation.Factory
import io.micronaut.context.annotation.Value
import io.micronaut.test.extensions.junit5.annotation.MicronautTest
import jakarta.inject.Inject
import kotlin.test.assertEquals
import org.junit.jupiter.api.Test

@MicronautTest
class TestApplicationYaml {
@Inject lateinit var defaultValueBean: DefaultValueBean

@Test
fun testMainDefaultValue() {
assertEquals("/staging/files", defaultValueBean.stagingFolder)
assertEquals(false, defaultValueBean.fileTransferEnable)
}
}

data class DefaultValueBean(
val stagingFolder: String,
val fileTransferEnable: Boolean,
)

@Factory
class TestFactory {
@Bean
fun defaultValueBean(
@Value("\${airbyte.file-transfer.staging-path}") stagingFolder: String,
@Value("\${airbyte.file-transfer.enabled}") fileTransferEnable: Boolean,
): DefaultValueBean {
return DefaultValueBean(stagingFolder, fileTransferEnable)
}
}
(another changed file; path not shown in this view)
@@ -49,7 +49,7 @@ data object CliRunner {
/** Same as [source] but for destinations. */
fun destination(
op: String,
config: ConfigurationSpecification? = null,
configContents: String? = null,
catalog: ConfiguredAirbyteCatalog? = null,
state: List<AirbyteStateMessage>? = null,
inputStream: InputStream,
@@ -60,8 +60,9 @@
.singleton(true)
.build()
val out = CliRunnerOutputStream()
val configPath: Path? = inputFileFromString(configContents)
val runnable: Runnable =
makeRunnable(op, config, catalog, state) { args: Array<String> ->
makeRunnable(op, configPath, catalog, state) { args: Array<String> ->
AirbyteDestinationRunner(
args,
featureFlags.systemEnv,
@@ -75,7 +76,7 @@
/** Same as the other [destination] but with [AirbyteMessage] input. */
fun destination(
op: String,
config: ConfigurationSpecification? = null,
configContents: String? = null,
catalog: ConfiguredAirbyteCatalog? = null,
state: List<AirbyteStateMessage>? = null,
featureFlags: Set<FeatureFlag> = setOf(),
@@ -90,7 +91,14 @@
baos.toByteArray()
}
val inputStream: InputStream = ByteArrayInputStream(inputJsonBytes)
return destination(op, config, catalog, state, inputStream, *featureFlags.toTypedArray())
return destination(
op,
configContents,
catalog,
state,
inputStream,
*featureFlags.toTypedArray()
)
}

private fun makeRunnable(
@@ -101,6 +109,16 @@
connectorRunnerConstructor: (Array<String>) -> AirbyteConnectorRunner,
): Runnable {
val configFile: Path? = inputFile(config)
return makeRunnable(op, configFile, catalog, state, connectorRunnerConstructor)
}

private fun makeRunnable(
op: String,
configFile: Path?,
catalog: ConfiguredAirbyteCatalog?,
state: List<AirbyteStateMessage>?,
connectorRunnerConstructor: (Array<String>) -> AirbyteConnectorRunner,
): Runnable {
val catalogFile: Path? = inputFile(catalog)
val stateFile: Path? = inputFile(state)
val args: List<String> =
@@ -126,9 +144,10 @@
get() = toSet().map { it.envVar.name to it.requiredEnvVarValue }.toMap()

private fun inputFile(contents: Any?): Path? =
contents?.let { inputFileFromString(Jsons.writeValueAsString(contents)) }

private fun inputFileFromString(contents: String?): Path? =
contents?.let {
Files.createTempFile(null, null).also { file ->
Files.writeString(file, Jsons.writeValueAsString(contents))
}
Files.createTempFile(null, null).also { file -> Files.writeString(file, contents) }
}
}
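The CliRunner hunks above replace the parsed `ConfigurationSpecification` parameter with a raw `configContents` string and write it verbatim to a temporary file. A minimal standalone sketch of that temp-file pattern (assuming JDK 11+; this mirrors, but is not, the CDK's `inputFileFromString` helper):

```kotlin
import java.nio.file.Files
import java.nio.file.Path

// Write raw config contents to a temp file and return its path; null contents
// mean "no config file", matching the nullable parameter in the diff above.
fun inputFileFromString(contents: String?): Path? =
    contents?.let {
        Files.createTempFile(null, null).also { file -> Files.writeString(file, it) }
    }

fun main() {
    val configPath = inputFileFromString("""{"host": "localhost", "port": 5432}""")
    println(configPath)                         // e.g. /tmp/1234567890.tmp (name varies)
    println(configPath?.let(Files::readString)) // prints the JSON exactly as provided
}
```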
(another changed file; path not shown in this view)
@@ -2,11 +2,11 @@
package io.airbyte.cdk.discover

import io.airbyte.cdk.data.AirbyteSchemaType
import io.airbyte.cdk.data.IntCodec
import io.airbyte.cdk.data.JsonDecoder
import io.airbyte.cdk.data.JsonEncoder
import io.airbyte.cdk.data.JsonStringCodec
import io.airbyte.cdk.data.LeafAirbyteSchemaType
import io.airbyte.cdk.data.LongCodec
import io.airbyte.cdk.data.OffsetDateTimeCodec
import java.time.OffsetDateTime

@@ -63,7 +63,6 @@ interface MetaField : FieldOrMetaField {
enum class CommonMetaField(
override val type: FieldType,
) : MetaField {
CDC_LSN(CdcStringMetaFieldType),
CDC_UPDATED_AT(CdcOffsetDateTimeMetaFieldType),
CDC_DELETED_AT(CdcOffsetDateTimeMetaFieldType),
;
@@ -80,8 +79,8 @@ data object CdcStringMetaFieldType : LosslessFieldType {

data object CdcIntegerMetaFieldType : LosslessFieldType {
override val airbyteSchemaType: AirbyteSchemaType = LeafAirbyteSchemaType.INTEGER
override val jsonEncoder: JsonEncoder<Int> = IntCodec
override val jsonDecoder: JsonDecoder<Int> = IntCodec
override val jsonEncoder: JsonEncoder<Long> = LongCodec
override val jsonDecoder: JsonDecoder<Long> = LongCodec
}

data object CdcOffsetDateTimeMetaFieldType : LosslessFieldType {
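The last hunk widens `CdcIntegerMetaFieldType` from `IntCodec` to `LongCodec`, presumably because CDC positions (binlog offsets, LSN-derived counters, and similar) can exceed the 32-bit range. A small Kotlin illustration of the overflow a `Long` avoids (the sample value is made up):

```kotlin
fun main() {
    val intMax = Int.MAX_VALUE        // 2_147_483_647
    val cdcPosition = 3_000_000_000L  // hypothetical CDC position larger than Int.MAX_VALUE
    println(cdcPosition > intMax)     // true
    println(cdcPosition.toInt())      // -1294967296: silent overflow if narrowed to Int
    println(cdcPosition)              // 3000000000: preserved as Long
}
```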
(diffs for the remaining changed files are not shown)
